branch_name
stringclasses 149
values | text
stringlengths 23
89.3M
| directory_id
stringlengths 40
40
| languages
listlengths 1
19
| num_files
int64 1
11.8k
| repo_language
stringclasses 38
values | repo_name
stringlengths 6
114
| revision_id
stringlengths 40
40
| snapshot_id
stringlengths 40
40
|
---|---|---|---|---|---|---|---|---|
refs/heads/master
|
<file_sep>#pragma once
#include "Module.h"
#include "Globals.h"

// Conversion helpers between world units (meters) and screen units (pixels).
// BUGFIX: the macro parameter names were swapped (METERS_TO_PIXELS took
// "pixels" and vice versa); the expansions are unchanged.
// NOTE(review): 100.0f and 0.1f are not inverses of each other — a
// round-trip through both macros scales the value by 10. Confirm the
// intended pixels-per-meter ratio before relying on these.
#define METERS_TO_PIXELS(meters) { (int)(meters * 100.0f)}
#define PIXEL_TO_METERS(pixels) { (float)(pixels * 0.1f) }

class b2World;

// Box2D physics module: owns the simulation world and a debug-draw toggle.
class ModulePhysics : public Module
{
public:
	ModulePhysics(Application* app, bool start_enabled = true);
	~ModulePhysics();

	bool Start();
	update_status PreUpdate();
	update_status PostUpdate();
	bool CleanUp();

private:
	b2World *world;   // Box2D simulation world
	bool debug;       // debug rendering toggle
};<file_sep>#include "PhysVehicle3D.h"
#include "Primitive.h"
#include "Bullet/include/btBulletDynamicsCommon.h"
// ----------------------------------------------------------------------------
// VehicleInfo destructor.
// Wheel-array cleanup is intentionally disabled: the commented-out delete
// suggests `wheels` is owned (and freed) elsewhere — TODO confirm ownership.
VehicleInfo::~VehicleInfo()
{
	//if(wheels != NULL)
	//delete wheels;
}
// ----------------------------------------------------------------------------
// Wraps a Bullet raycast vehicle: the rigid body is handed to the
// PhysBody3D base, the vehicle pointer and its configuration are stored.
PhysVehicle3D::PhysVehicle3D(btRigidBody* body, btRaycastVehicle* vehicle, const VehicleInfo& info) : PhysBody3D(body), vehicle(vehicle), info(info)
{
}
// ----------------------------------------------------------------------------
// Destructor: releases the Bullet vehicle object. The rigid body passed at
// construction is presumably released by the PhysBody3D base — TODO confirm.
PhysVehicle3D::~PhysVehicle3D()
{
	delete vehicle;
}
// ----------------------------------------------------------------------------
void PhysVehicle3D::Render()
{
Cylinder wheel;
wheel.color = Blue;
for(int i = 0; i < vehicle->getNumWheels(); ++i)
{
wheel.radius = info.wheels[0].radius;
wheel.height = info.wheels[0].width;
vehicle->updateWheelTransform(i);
vehicle->getWheelInfo(i).m_worldTransform.getOpenGLMatrix(&wheel.transform);
wheel.Render();
}
Cube chassis(info.chassis_size.x, info.chassis_size.y, info.chassis_size.z);
vehicle->getChassisWorldTransform().getOpenGLMatrix(&chassis.transform);
btQuaternion q = vehicle->getChassisWorldTransform().getRotation();
btVector3 offset(info.chassis_offset.x, info.chassis_offset.y, info.chassis_offset.z);
offset = offset.rotate(q.getAxis(), q.getAngle());
Cube front(info.front_size.x, info.front_size.y, info.front_size.z);
vehicle->getChassisWorldTransform().getOpenGLMatrix(&front.transform);
btQuaternion q1 = vehicle->getChassisWorldTransform().getRotation();
btVector3 foffset(info.front_offset.x, info.front_offset.y, info.front_offset.z);
foffset = foffset.rotate(q1.getAxis(), q1.getAngle());
Cube pale(info.pale_size.x, info.pale_size.y, info.pale_size.z);
vehicle->getChassisWorldTransform().getOpenGLMatrix(&pale.transform);
btQuaternion q2 = vehicle->getChassisWorldTransform().getRotation();
btVector3 poffset(info.pale_offset.x, info.pale_offset.y, info.pale_offset.z);
poffset = poffset.rotate(q2.getAxis(), q2.getAngle());
chassis.transform.M[12] += offset.getX();
chassis.transform.M[13] += offset.getY();
chassis.transform.M[14] += offset.getZ();
front.transform.M[12] += foffset.getX();
front.transform.M[13] += foffset.getY();
front.transform.M[14] += foffset.getZ();
pale.transform.M[12] += poffset.getX();
pale.transform.M[13] += poffset.getY();
pale.transform.M[14] += poffset.getZ();
chassis.Render();
front.Render();
pale.Render();
}
// ----------------------------------------------------------------------------
// Applies the given engine force to every wheel flagged as a drive wheel.
void PhysVehicle3D::ApplyEngineForce(float force)
{
	const int wheel_count = vehicle->getNumWheels();
	for(int wheel_index = 0; wheel_index < wheel_count; ++wheel_index)
	{
		if(info.wheels[wheel_index].drive)
			vehicle->applyEngineForce(force, wheel_index);
	}
}
// ----------------------------------------------------------------------------
// Applies the braking force to every wheel flagged as a brake wheel.
void PhysVehicle3D::Brake(float force)
{
	const int wheel_count = vehicle->getNumWheels();
	for(int wheel_index = 0; wheel_index < wheel_count; ++wheel_index)
	{
		if(info.wheels[wheel_index].brake)
			vehicle->setBrake(force, wheel_index);
	}
}
// ----------------------------------------------------------------------------
// Sets the steering value on every wheel flagged as a steering wheel.
// NOTE(review): the parameter is named "degrees" but is forwarded straight
// to Bullet's setSteeringValue — confirm the expected unit.
void PhysVehicle3D::Turn(float degrees)
{
	const int wheel_count = vehicle->getNumWheels();
	for(int wheel_index = 0; wheel_index < wheel_count; ++wheel_index)
	{
		if(info.wheels[wheel_index].steering)
			vehicle->setSteeringValue(degrees, wheel_index);
	}
}
// ----------------------------------------------------------------------------
// Current vehicle speed as reported by Bullet, in km/h.
float PhysVehicle3D::GetKmh() const
{
	return vehicle->getCurrentSpeedKmHour();
}
// Returns the vehicle's forward direction converted to the engine's vec3 type.
vec3 PhysVehicle3D::GetForwardVector()
{
	const btVector3 forward = vehicle->getForwardVector();

	vec3 direction;
	direction.Set(forward.x(), forward.y(), forward.z());
	return direction;
}
|
1ac0dd8946a36ba342f86f695f096c8afe513744
|
[
"C++"
] | 2 |
C++
|
PolRecasensSarra/Box2D
|
b12294eacd93f2d02dd3e0c02c0b7ef379f17c46
|
67d703adac451c613cfad8e4a20abaaf8295d4c2
|
refs/heads/main
|
<file_sep>import { createApp } from "vue";
import App from "./App.vue";
import router from "./router";
import store from "./store";
import { components, plugins } from "./lib/elementUi";

// Application bootstrap: register every Element UI component and plugin,
// then attach the store and router and mount onto the #app element.
const app = createApp(App);

for (const component of components) {
  app.component(component.name, component);
}

for (const plugin of plugins) {
  app.use(plugin);
}

app.use(store).use(router).mount("#app");
|
6e8ca6a419d65b4ea60b2ab2ab121da62dc042a9
|
[
"TypeScript"
] | 1 |
TypeScript
|
a47781402/vue-3.0-ts-x6-demo
|
f1d075e667ca2f8538d04e3e455f5b4f4353196d
|
8699b19f11473c5e308f56cc5af56458f81759ae
|
refs/heads/master
|
<repo_name>DroneBase/heroku-buildpack-awscli<file_sep>/bin/compile
#!/usr/bin/env bash
# bin/compile <build-dir> <cache-dir>
# Heroku buildpack compile step: installs the AWS CLI into the build.

# fail fast
set -e

# debug
# set -x

# clean up leaking environment
unset GIT_DIR

# BUGFIX: Heroku passes the build directory as the first positional
# argument; the original script read $BUILD_DIR, which the platform never
# sets, so the PATH entry below pointed at "/.python-ldap/groff/bin".
BUILD_DIR=$1

echo "-----> Adding soelim to path"
export PATH=$PATH:$BUILD_DIR/.python-ldap/groff/bin

pip install awscli
|
c50d572fdab1065669ceb2aac255a13fcef282d3
|
[
"Shell"
] | 1 |
Shell
|
DroneBase/heroku-buildpack-awscli
|
e725506a4a396c8bfe2e6baaddd591ce5dc185be
|
c2e1ca69930c9576511a361e6c278f04dded2aa9
|
refs/heads/master
|
<repo_name>bria559/hw5-git-practice<file_sep>/hw5_git.py
# Minimal git-practice script: emits the canonical greeting.
# here is my new comment/ modification
greeting = 'Hello World!'
print(greeting)
|
4fb3fe48562096373bb8e6b32f22a8b03bc8b0c5
|
[
"Python"
] | 1 |
Python
|
bria559/hw5-git-practice
|
a5f8245df875db5b35fa9f9ff1e5a8766988d44b
|
03af6098070085cca5f65585894c964015d13922
|
refs/heads/master
|
<repo_name>AndrewMO/BackupFiles<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/18_ReceiptsRedesign.sql
use ACM01vegasJetty

-- Ensure the 'enable_new_receipt_redirect' flag exists and is set to 'true'.
-- BUGFIX: the keyword compared in the IF, and both sides of the UPDATE, must
-- name the same setting the script inserts (and that the final SELECT reads
-- back); the original compared against a corrupted placeholder value, so
-- existing rows were never found or updated.
IF not exists (select keywordvalue from SYSTEMINFO where KEYWORD = 'enable_new_receipt_redirect')
BEGIN
    insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('enable_new_receipt_redirect', 'true')
END
ELSE
BEGIN
    update systeminfo set KEYWORDVALUE = 'true' where KEYWORD = 'enable_new_receipt_redirect'
END
select * from dbo.systeminfo where keyword = 'enable_new_receipt_redirect'<file_sep>/PythonProjects/PerfEnvRelated/Stage_CheckJVM.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Run each command in `cmd` on `host` over SSH and print stdout lines.

    Output lines are prefixed with the host name (ANSI-colored). Connection
    or execution errors are caught and reported rather than raised.
    """
    try:
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(host, 22, username, passwd, timeout=5)
        for command in cmd:
            stdin, stdout, stderr = client.exec_command(command)
            # Echo every line of remote stdout, prefixed with the host name.
            for line in stdout.readlines():
                print("\033[0;37;40m%s\033[0m : %s" % (host, line))
        client.close()
    except Exception as e:
        print('%s\tError\n:' %(host))
        print(e)
if __name__ == '__main__':
    # Launcher: checks which JVM processes run on the stage servlet and
    # ignite-cache hosts by grepping `ps` output over SSH, one thread per host.
    # cmd = ['ps -ef|grep java|grep -v grep|grep 19.01.0.067']  # command list to execute
    cmd = ['ps -ef|grep java|grep -v grep|grep ActiveNetServlet1']  # command list to execute
    # cmd = ['ps -ef|grep java']  # command list to execute
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (anonymized placeholder)
    threads = []  # multithreading bookkeeping (note: never appended to below)
    print("Begin......Servlet")
    # Servlet hosts 01w..02w (range(1, 3) -> 1, 2); numbers < 10 are zero-padded.
    for i in range(1, 3):
        #stage-activenet-01w.an.dev.activenetwork.com
        if i < 10:
            host1 = 'stage-activenet-0' + str(i) + 'w.an.dev.activenetwork.com'
        else:
            host1 = 'stage-activenet-' + str(i) + 'w.an.dev.activenetwork.com'
        a = threading.Thread(target=ssh2, args=(host1, username, passwd, cmd))
        a.start()
    print("Begin......Cache")
    # Ignite cache hosts ignite01w..ignite03w (range(1, 4) -> 1..3).
    for i in range(1, 4):
        #stage-activenet-ignite04w.an.dev.activenetwork.com
        if i < 10:
            host2 = 'stage-activenet-ignite0' + str(i) + 'w.an.dev.activenetwork.com'
        else:
            host2 = 'stage-activenet-ignite' + str(i) + 'w.an.dev.activenetwork.com'
        b = threading.Thread(target=ssh2, args=(host2, username, passwd, cmd))
        b.start()
<file_sep>/PythonProjects/PerfEnvRelated/Stage_Initialing.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import threading
import time
import logging
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
# logging.disable(logging.CRITICAL)
def getResponse(url, org):
    """GET `url` and print org, server number, HTTP status and elapsed time."""
    requests.adapters.DEFAULT_RETRIES = 5
    started = time.time()
    response = requests.get(url)
    initial_time = time.time() - started
    # NOTE(review): relies on a fixed URL layout — characters 23:25 are
    # assumed to hold the two-digit server number; verify if naming changes.
    server = url[23:25]
    rsc = response.status_code
    print("Org : %s ; Server : %s ; Status_Code : %r ; initial time : %r s ; hosturl : %s" % (org, server, rsc, initial_time, url))
if __name__ == '__main__' :
    # Launcher: warms up each stage org by hitting its admin login page on
    # servlet servers 01w and 02w, one request thread per org/server pair.
    # logging.debug('start of program')
    thread = []
    # Stage orgs to initialize.
    orgname = ['lstgapachejunction','lstgbreckenridgerec','lstgcampbellrecreation','lstgchandleraz','lstgchesterfieldparksrec','lstgcityofcarlsbad','lstgcityofcorona','lstgcityofdowney','lstgculpepercopandr','lstgdenver','lstgebparks','lstgencinitasparksandrec','lstgfalmouthcommunityprog','lstgfpdccrecreation','lstggepark','lstggjparksandrec','lstgindymca','lstgkansascityymca','lstglanguagestars','lstglbparks','lstgmesaaz','lstgminneapolisparks','lstgmontgomerycounty','lstgmrurecreation','lstgnaparec','lstgnms','lstgnorthshoreymca','lstgomahaconservatory','lstgoneteamkids','lstgportlandparks','lstgrightatschool','lstgsanjoseparksandrec','lstgsdparkandrec','lstgsfcmprep','lstgymcagreaterbrandywine','lstgymcasatx']
    # orgname = ['lstgapachejunction' ]
    for org in orgname:
        # Servers 01w..02w (range(1, 3) -> 1, 2); numbers < 10 are zero-padded.
        for i in range(1, 3):
            if (i < 10):
                urlstr = "http://stage-activenet-0"+str(i)+"w.an.dev.activenetwork.com:3090/"+org+"/servlet/adminlogin.sdi"
            else:
                urlstr = "http://stage-activenet-"+str(i)+"w.an.dev.activenetwork.com:3090/"+org+"/servlet/adminlogin.sdi"
            a = threading.Thread(target=getResponse, args=(urlstr, org))
            a.start()
            # getResponse(urlstr, org)
    # logging.debug('end of program')
<file_sep>/Neoload JARs/HPS3desEncrypt/src/com/active/hpsperf/RawData.java
package com.active.hpsperf;
import java.io.File;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
public class RawData {
public static String paymentRequest = "<KEY>;
public static String commitTransaction = "<KEY>;
public static String encryptPaymentRequestFromXMLFile() {
Document doc = loadXmlFromFile("src/Commit.xml");
Element rootElement = doc.getRootElement();
return CipherCode.encrypt(rootElement.asXML());
}
public static String encryptCommitRequestFromXMLFile() {
Document doc = loadXmlFromFile("src/Request.xml");
Element rootElement = doc.getRootElement();
return CipherCode.encrypt(rootElement.asXML());
}
private static Document loadXmlFromFile(String requestXMLpath) {
File requestFile = new File(requestXMLpath);
SAXReader reader = new SAXReader();
Document doc;
try {
doc = reader.read(requestFile);
return doc;
} catch (DocumentException e) {
e.printStackTrace();
}
return null;
}
}
<file_sep>/PythonProjects/PerfEnvRelated/Stage_CheckPkg.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Execute each command in `cmd` on `host` via SSH, printing stdout lines.

    Any connection/execution error is caught and printed instead of raised.
    """
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            # stdin.write("Y")  # simple interaction: send 'Y' if prompted
            out = stdout.readlines()
            # screen output
            for o in out:
                #print(o)
                print("%s : %s" % (host, o))
            #print('%s\t start service OK\n' %(host))
        ssh.close()
    except Exception as e:
        print('%s\tError\n:' %(host))
        print(e)
if __name__ == '__main__':
    # Launcher: builds one "ls" command per org directory, then runs the
    # whole batch in a single SSH session against one stage host.
    # perf orgs
    orgname = ['lstgapachejunction', 'lstgbreckenridgerec', 'lstgcampbellrecreation', 'lstgchandleraz',
               'lstgchesterfieldparksrec', 'lstgcityofcarlsbad', 'lstgcityofcorona', 'lstgcityofdowney',
               'lstgculpepercopandr', 'lstgdenver', 'lstgebparks', 'lstgencinitasparksandrec',
               'lstgfalmouthcommunityprog', 'lstgfpdccrecreation', 'lstggepark', 'lstggjparksandrec', 'lstgindymca',
               'lstgkansascityymca', 'lstglanguagestars', 'lstglbparks', 'lstgmesaaz', 'lstgminneapolisparks',
               'lstgmontgomerycounty', 'lstgmrurecreation', 'lstgnaparec', 'lstgnms', 'lstgnorthshoreymca',
               'lstgomahaconservatory', 'lstgoneteamkids', 'lstgportlandparks', 'lstgrightatschool',
               'lstgsanjoseparksandrec', 'lstgsdparkandrec', 'lstgsfcmprep', 'lstgymcagreaterbrandywine',
               'lstgymcasatx']
    # auto orgs
    # orgname = ['automt01','automt02', 'automt03', 'automt04', 'automt05', 'automt06',
    #            'automt07', 'automt08', 'automt09', 'automt10', 'automt11',
    #            'automt12', 'automt13', 'automt14', 'automt15', 'automt16',
    #            'automt17', 'automt18','anetdev01','anetdev02', 'anetdev03', 'anetdev04',
    #            'jettytest01','jettytest02', 'jettytest03', 'jettytest04', 'jettytest05', 'jettytest06',
    #            'jettytest07', 'jettytest08', 'jettytest09', 'jettytest10', 'jettytest11',
    #            'jettytest12', 'jettytest13', 'jettytest14', 'linux01', 'linux02', 'linux03', 'linux04',
    #            'linux05', 'linux06', 'linux07', 'linux08', 'linux09', 'linux10', 'linux11', 'linux12',
    #            'linux13', 'linux14', 'linux15', 'linux16', 'linux17', 'linux18', 'linux19', 'linux21']
    # function orgs
    # orgname = ['lstgymcasatx','lstgsanjoseparksandrec']
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (anonymized placeholder)
    cmd = []
    # One directory listing per org; the command list is executed in one go.
    for org in orgname:
        cmdString = "ls -l /opt/active/ActiveNet/stage/" + org
        cmd.append(cmdString)
        print("\033[0;37;40m %s \033[0m" %(org))
    host1 = 'stage-activenet-02w.an.dev.activenetwork.com'
    ssh2(host1, username, passwd, cmd)
    print("\033[0;32;43m %s \033[0m" % ("finish"))
    cmd.clear()
<file_sep>/PythonProjects/PerfEnvRelated/RushPool_CUI_ClearPackage.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
from pypsexec.client import Client
import threading
import time
def clear_version_pkg_on_remote(server, username, password, executable, remote_path):
    """Delete all but the newest build directory under `remote_path` on `server`.

    Lists the directories with `dir /b /ad` over pypsexec, then issues one
    `rd /s /q` per directory except the last listed one.
    Note: encrypt=False is required for Windows 7 / Server 2008 targets.
    """
    # BUGFIX: forward the function's `password` argument; the original line
    # contained an anonymization placeholder that is not valid Python.
    c = Client(server, username=username, password=password, encrypt=False)
    # print("\033[0;37;40m%s\033[0m" %(server))
    c.connect()
    try:
        c.create_service()
        # List the build directories currently present (one name per line).
        version_list = c.run_executable(executable, arguments="/c \"dir " + remote_path + " /b /ad\"")
        versions = []
        version_tmp = ""
        # Split the raw CRLF-terminated listing into directory names.
        for item in version_list[0].decode('utf-8'):
            if item not in ('\r', '\n'):
                version_tmp = version_tmp + item
                continue
            elif item == '\r':
                continue
            elif item == '\n':
                versions.append(version_tmp)
                version_tmp = ""
                continue
        # Keep only the last listed directory; everything before it is removed.
        if len(versions) <= 1:
            print("\033[0;37;40m%s\033[0m" % (server))
            print("only latest build %r on server %r" % (versions[0], server))
            print("#" * len(versions) * 15)
        else:
            print("\033[0;37;40m%s\033[0m" % (server))
            for version in versions[:-1]:
                print("-- working on %r with version %r " % (server, version))
                # NOTE(review): the delete path is hard-coded to
                # E:\acm\_versions and ignores `remote_path` — confirm they
                # always match before changing either.
                arguments = "/c \"rd /s /q E:\\acm\\_versions\\" + version + "\""
                result = c.run_executable(executable, arguments=arguments)
                print("STDOUT:\n%s" % result[0].decode('utf-8') if result[0] else "")
                print("STDERR:\n%s" % result[1].decode('utf-8') if result[1] else "")
                print("## complete on %r with version %r " % (server, version))
    finally:
        c.remove_service()
        c.disconnect()
# print("STDOUT:\n%s" % result[0].decode('utf-8') if result[0] else "")
# print("STDERR:\n%s" % result[1].decode('utf-8') if result[1] else "")
# print("RC: %d" % result[2])
if __name__ == '__main__':
    # Launcher: clears stale build directories on every rush-pool app server.
    # servers = ["ANACMP007.active.tan"]
    servers = ["ANACMP003.active.tan", "ANACMP003a.active.tan", "ANACMP003b.active.tan",\
               "ANACMP004.active.tan", "ANACMP004a.active.tan", "ANACMP004b.active.tan", \
               "ANACMP005.active.tan", "ANACMP005a.active.tan", "ANACMP005b.active.tan", \
               "ANACMP006.active.tan", "ANACMP006a.active.tan", "ANACMP006b.active.tan", \
               "ANACMP006c.active.tan", "ANACMP006d.active.tan", "ANACMP007.active.tan", \
               "ANACMP007a.active.tan", "ANACMP007b.active.tan", "ANACMP007c.active.tan",\
               "ANACMP007d.active.tan", "ANACMP008.active.tan", "ANACMP008a.active.tan", \
               "ANACMP008b.active.tan", "ANACMP008c.active.tan", "ANACMP008d.active.tan"]
    # servers = [ "ANACMP003.active.tan", "ANACMP003a.active.tan" ]
    username = "tan\\ajia"
    password = "<PASSWORD>"  # anonymized placeholder credential
    executable = "cmd.exe"
    # executable = "iisreset.exe"
    # arguments = "/c \"rd /s /q E:\\acm\\_versions\\" + version + "\""
    # arguments = "/c \"dir E:\\acm\\_versions /b /ad\""
    # arguments = "/all"
    remote_path = "E:\\acm\\_versions"
    #cmd /c "rd /s /q E:\acm\_versions\19.14.0.041"
    for server in servers:
        t = threading.Thread(target=clear_version_pkg_on_remote, args=(server, username, password, executable, remote_path))
        t.start()
        # NOTE(review): join() immediately after start() serializes the loop —
        # servers are processed one at a time despite the per-server thread.
        t.join()
    # clear_version_pkg_on_remote(server, username, password, executable, remote_path)
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/13_ChangeCacheMode.sql
-- Switch the org cache mode to LOCAL for acm01vegasjetty.
use acm01vegasjetty

-- Show the cache-related settings before the change.
select * from systeminfo where keyword like '%cache%'

-- NOTE(review): if the 'org_cache_mode' row does not exist this UPDATE is a
-- no-op — the commented INSERT below would be needed first.
--INSERT INTO systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('org_cache_mode','NATIVE')
update systeminfo set KEYWORDVALUE = 'LOCAL' where KEYWORD = 'org_cache_mode'
select * from systeminfo where keyword like '%cache%' <file_sep>/PythonProjects/PerfEnvRelated/tmp02.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import threading
import time
import logging
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
# logger = logging.getLogger(__Name__)
# logging.disable(logging.CRITICAL)
def getResponse(url, org):
    """GET `url` and print org, server number, HTTP status and elapsed time."""
    requests.adapters.DEFAULT_RETRIES = 5
    # response = requests.get(url)
    # response.raise_for_status()
    time_start = time.time()
    response = requests.get(url)
    # response.raise_for_status()
    time_end = time.time()
    initial_time = time_end - time_start
    # Characters 23:25 of the URL are assumed to be the two-digit server
    # number — TODO confirm against the host naming scheme.
    server = url[23:25]
    rsc = response.status_code
    print("Org : %s ; Server : %s ; Status_Code : %r ; initial time : %r s ; hosturl : %s" % (org, server, rsc, initial_time, url))
if __name__ == '__main__' :
    # Launcher: warms up a small set of stage orgs by hitting the admin login
    # page on servlet servers 03w and 04w (port 3000), one thread per pair.
    # logging.debug('start of program')
    thread = []
    orgname = ['lstgchicagoparkdistrict','lstgymcala','lstgphoenix','lstgdvusdce','lstgqa01','lstgqa02','lstgqa01trainer']
    # orgname = ['lstgchicagoparkdistrict']
    # orgname = ['lstgapachejunction' ]
    for org in orgname:
        # Servers 03w..04w (range(3, 5) -> 3, 4); numbers < 10 are zero-padded.
        for i in range(3, 5):
            if (i < 10):
                urlstr = "http://stage-activenet-0"+str(i)+"w.an.dev.activenetwork.com:3000/"+org+"/servlet/adminlogin.sdi"
            else:
                urlstr = "http://stage-activenet-"+str(i)+"w.an.dev.activenetwork.com:3000/"+org+"/servlet/adminlogin.sdi"
            a = threading.Thread(target=getResponse, args=(urlstr, org))
            a.start()
            # getResponse(urlstr, org)
    # logging.debug('end of program')
<file_sep>/IISRESET/RushPool_CUI_IISRESET.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
from pypsexec.client import Client
def RunCmdOnRemote(server, username, password, executable, arguments):
    """Run `executable arguments` on a remote Windows host and print its output.

    Note: encrypt=False is required for Windows 7 / Server 2008 targets.
    """
    # BUGFIX: forward the function's `password` argument; the original line
    # contained an anonymization placeholder that is not valid Python.
    c = Client(server, username=username, password=password, encrypt=False)
    c.connect()
    try:
        c.create_service()
        result = c.run_executable(executable, arguments=arguments)
    finally:
        c.remove_service()
        c.disconnect()
    print("Current Server : " + server)
    print("STDOUT:\n%s" % result[0].decode('utf-8') if result[0] else "")
    print("STDERR:\n%s" % result[1].decode('utf-8') if result[1] else "")
if __name__ == '__main__':
    # Launcher: runs iisreset sequentially on every rush-pool app server.
    # server = "ANACMP003.active.tan"
    servers = ["ANACMP003.active.tan", "ANACMP003a.active.tan", "ANACMP003b.active.tan",\
               "ANACMP004.active.tan", "ANACMP004a.active.tan", "ANACMP004b.active.tan", \
               "ANACMP005.active.tan", "ANACMP005a.active.tan", "ANACMP005b.active.tan", \
               "ANACMP006.active.tan", "ANACMP006a.active.tan", "ANACMP006b.active.tan", \
               "ANACMP006c.active.tan", "ANACMP006d.active.tan", "ANACMP007.active.tan", \
               "ANACMP007a.active.tan", "ANACMP007b.active.tan", "ANACMP007c.active.tan",\
               "ANACMP007d.active.tan", "ANACMP008.active.tan", "ANACMP008a.active.tan", \
               "ANACMP008b.active.tan", "ANACMP008c.active.tan", "ANACMP008d.active.tan"]
    username = "tan\\ajia"
    password = "<PASSWORD>"  # anonymized placeholder credential
    executable = "iisreset.exe"
    arguments = ""
    # arguments = "/all"
    for server in servers:
        RunCmdOnRemote(server, username, password, executable, arguments)
<file_sep>/PythonProjects/PerfEnvRelated/Acm01vegajetty_CUISession.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import threading
import logging
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
# logging.disable(logging.CRITICAL)
def getResponse(url):
    """Fetch `url` and print the CUI request-count response body."""
    requests.adapters.DEFAULT_RETRIES = 3
    rsc = requests.get(url).text
    # Characters 22:24 of the URL are assumed to hold the server number.
    serverNo = url[22:24]
    print("hosturl : %s ; Number : %s ; %r\n" % (url, serverNo, rsc))
if __name__ == '__main__' :
    # Launcher: queries the CUI request count for one org on the perf
    # servlet host(s), one request thread per server.
    # logging.debug('start of program')
    thread = []
    # orgname = 'perf04'
    orgname = 'acm01vegasjetty'
    count = 0
    # Only server 01w (range(1, 2) -> 1); numbers < 10 are zero-padded.
    for i in range(1, 2):
        if (i < 10):
            urlstr = "http://perf-activenet-0"+str(i)+"w.an.active.tan:3000/"+orgname+"/servlet/getCUIRequestCount.sdi"
            # logging.debug('i is '+ str(i) + ' , url is ' + urlstr)
        else:
            urlstr = "http://perf-activenet-"+str(i)+"w.an.active.tan:3000/"+orgname+"/servlet/getCUIRequestCount.sdi"
            # logging.debug('i is ' + str(i) + ' , url is ' + urlstr)
        # print(urlstr)
        # print("server %r is initialing" %(i))
        a = threading.Thread(target=getResponse, args=(urlstr,))
        a.start()
        count += 1
    print("initial %d orgs" %(count))
    # logging.debug('end of program')
<file_sep>/PythonProjects/PerfEnvRelated/PerfQA_WindowsIP.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
from pypsexec.client import Client
def run_cmd_on_remote(server, username, password, executable, arguments):
    """Run `executable arguments` on a remote Windows host and print its output.

    Note: encrypt=False is required for Windows 7 / Server 2008 targets.
    """
    # BUGFIX: forward the function's `password` argument; the original line
    # contained an anonymization placeholder that is not valid Python.
    c = Client(server, username=username, password=password, encrypt=False)
    print("\033[0;37;40m%s\033[0m" %(server))
    c.connect()
    try:
        c.create_service()
        result = c.run_executable(executable, arguments=arguments)
    finally:
        c.remove_service()
        c.disconnect()
    print("STDOUT:\n%s" % result[0].decode('utf-8') if result[0] else "")
    print("STDERR:\n%s" % result[1].decode('utf-8') if result[1] else "")
    # print("RC: %d" % result[2])
if __name__ == '__main__':
    # Launcher: runs ipconfig sequentially on every perf-QA Windows host.
    servers = ["dev-perfqa-01w.dev.activenetwork.com", "dev-perfqa-02w.dev.activenetwork.com", "dev-perfqa-03w.dev.activenetwork.com",\
               "dev-perfqa-04w.dev.activenetwork.com", "dev-perfqa-05w.dev.activenetwork.com", "dev-perfqa-06w.dev.activenetwork.com", \
               "dev-perfqa-07w.dev.activenetwork.com"]
    username = "dev\\ajia"
    password = "<PASSWORD>"  # anonymized placeholder credential
    executable = "ipconfig.exe"
    # executable = "iisreset.exe"
    arguments = ""
    # arguments = "/all"
    #cmd /c "rd /s /q "E:\acm\_versions\19.13.*" "
    for server in servers:
        run_cmd_on_remote(server, username, password, executable, arguments)
<file_sep>/RefreshPerfDB/RefreshSTGDB/12_EnableCacheContorl.sql
-- Enable cache control for test site
-- @<NAME>, 2017-04-07
-- Inserts the 'enable_cache_control_for_non_prod_site' flag (value 'true')
-- into systeminfo if it is not already present; existing rows are untouched.
USE acm01vegasjetty
if not exists (select 1 FROM systeminfo where KEYWORD = 'enable_cache_control_for_non_prod_site')
BEGIN
    insert into systeminfo (keyword,keywordvalue) values ('enable_cache_control_for_non_prod_site','true')
END
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/option/02_DisableIgniteLogs.sql
use acm01vegasjetty

-- Ensure the 'enable_ignite_monitor' flag exists and is set to 'false'.
if not exists (select * from systeminfo where keyword = 'enable_ignite_monitor')
BEGIN
    insert into systeminfo (KEYWORD,KEYWORDVALUE) values ('enable_ignite_monitor', 'false')
END
else
BEGIN
    -- BUGFIX: the UPDATE must filter on the same keyword the IF checks; the
    -- original filtered on a corrupted placeholder, so existing rows were
    -- never updated. The corrected form matches the equivalent block in
    -- CleanCustomerDataToPerf.sql.
    update systeminfo set KEYWORDVALUE = 'false' where KEYWORD = 'enable_ignite_monitor'
END
select * from systeminfo where keyword = 'enable_ignite_monitor'<file_sep>/Neoload JARs/ActiveNetCUIPerfTools/src/com/activenet/performance/GetCustomer.java
package com.activenet.performance;
import java.sql.*;
// Fetches random customer ids from the ActiveNetPerformance SQL Server DB.
public class GetCustomer {
    String connectionUrl = "jdbc:sqlserver://dev-perfqa-01w.dev.activenetwork.com;database=ActiveNetPerformance;";
    String username = "recware";
    String password = "<PASSWORD>";

    // Get customerID randomly.
    // NOTE(review): orgName is concatenated straight into the SQL text; call
    // this only with trusted, internally generated table names.
    public String randomCustoemrID(String orgName)
            throws ClassNotFoundException {
        Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
        String sql = "select top 1 customer_id from " + orgName + " where used = 0 order by newid()";
        return getIdFromSQL(sql);
    }

    /**
     * Runs `sql` and returns the first column of its first row, or null when
     * the query fails (the error is printed to stderr).
     */
    String getIdFromSQL(String sql) {
        String customerID = null;
        Connection con = null;
        Statement smt = null;
        try {
            con = DriverManager.getConnection(connectionUrl,
                    username, password);
            smt = con.createStatement();
            // Get customerID
            ResultSet result = smt.executeQuery(sql);
            result.next();
            customerID = result.getString(1);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUGFIX: the original only closed smt/con on success, leaking
            // the connection and statement whenever the query threw.
            try { if (smt != null) smt.close(); } catch (Exception ignored) { }
            try { if (con != null) con.close(); } catch (Exception ignored) { }
        }
        return customerID;
    }
}
<file_sep>/CleanCustomerDataToPerf.sql
-- Scrubs a restored production database (perf01) for safe use in the perf
-- environment: masks real email addresses, suspends auto-payments/renewals,
-- deletes queued email/message tasks, redirects the payment processor to a
-- local demo endpoint, and sets perf-specific system flags and paths.
-- NOTE: statement order matters (masking before deletes); do not reorder.
use perf01
--Clear real emails
UPDATE dbo.SYSTEM_USERS set EMAIL = 'xx_' + EMAIL where EMAIL <> '';
UPDATE dbo.CUSTOMERS set EMAIL = 'xx_' + EMAIL where EMAIL <> '';
UPDATE dbo.CUSTOMERS set ADDITIONAL_EMAIL = 'xx_' + ADDITIONAL_EMAIL where ADDITIONAL_EMAIL <> '';
UPDATE dbo.COMPANIES set EMAIL= 'xx_' + EMAIL where EMAIL <> '';
UPDATE dbo.INSTRUCTORS SET EMAIL = '<EMAIL>' WHERE EMAIL <>'';
-- Mask every email-bearing systeminfo/siteinfo keyword with an 'xx_' prefix.
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='account_change_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='attack_alert_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='auto_payment_email_failed_address_from' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='auto_renewal_email_failed_address_from' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='auto_payment_email_successful_address_from' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='auto_renewal_email_successful_address_from' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='contact_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='email_service_user_name' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='online_registration_notification' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='online_reservation_notification' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='permit_expiry_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='pos_reorder_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='queued_receipts_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='rcia_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='romailfromaddress' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='romailfromname' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SYSTEMINFO SET keywordvalue = 'xx_' + convert(varchar,keywordvalue) where keyword ='servlet_admin_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SITEINFO SET keywordvalue = 'xx_' + convert(varchar, keywordvalue) where keyword ='contact_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SITEINFO SET keywordvalue = 'xx_' + convert(varchar, keywordvalue) where keyword ='registration_notification_email' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SITEINFO SET keywordvalue = 'xx_' + convert(varchar, keywordvalue) where keyword ='romailfromaddress' AND datalength(KEYWORDVALUE) > 0;
UPDATE dbo.SITEINFO SET keywordvalue = 'xx_' + convert(varchar, keywordvalue) where keyword ='romailfromname' AND datalength(KEYWORDVALUE) > 0;
-- delete the bcc values, as they can be a list of email addresses
delete from dbo.SYSTEMINFO where keyword ='auto_payment_email_failed_address_bcc' AND datalength(KEYWORDVALUE) > 0;
delete from dbo.SYSTEMINFO where keyword ='auto_payment_email_successful_address_bcc' AND datalength(KEYWORDVALUE) > 0;
delete from dbo.SYSTEMINFO where keyword ='auto_renewal_email_failed_address_bcc' AND datalength(KEYWORDVALUE) > 0;
delete from dbo.SYSTEMINFO where keyword ='auto_renewal_email_successful_address_bcc' AND datalength(KEYWORDVALUE) > 0;
delete from dbo.SYSTEMINFO where keyword ='online_pending_customer_bcc' AND datalength(KEYWORDVALUE) > 0;
--Suspend production auto payments in case you need to use mstest.active that may cause errors send to mstest
UPDATE dbo.ARSCHEDULEHEADER SET SUSPEND_AUTO_PAY = -1;
UPDATE dbo.MEMBERSHIPS SET SUSPEND_AUTO_RENEWAL = -1;
--Disable scheduled reports
UPDATE dbo.REPORTDEFINITION SET RECIPIENT_ADDRESSES = '', ENABLE_EXPORT_TO_FTP = 0, SEND_CONFIRMATION_EMAIL = 0;
--Delete email and message tasks
delete dbo.BULKEMAILTASKS;
delete dbo.BULKEMAILATTACHMENTS;
delete dbo.BULKEMAILACKNOWLEDGEMENTS;
delete dbo.BULKEMAILRECIPIENTS;
delete dbo.OPTOUTHISTORY;
delete dbo.NEWSLETTER_OFFER_ORDER_PRODUCTS;
delete dbo.NEWSLETTER_OFFER_ORDERS;
delete dbo.CUSTOMER_SUBSCRIPTIONLIST;
delete dbo.MESSAGEQUEUES;
delete dbo.MESSAGES;
delete dbo.GROSSPAYEXPORTBATCHES;
delete dbo.ICVERIFYLOG;
delete dbo.REPORTDEFINITIONOVERRIDES;
delete dbo.REPORT_DEFINITION_TIME;
--Update credit card processor host address to localdemo
UPDATE dbo.SYSTEM SET VERISIGNHOSTADDRESS = 'localdemo';
UPDATE dbo.SYSTEM_USERS SET PASSWORD ='<PASSWORD>' WHERE USERNAME <> 'acmcuiuser';
--Disable akamai for local restore
--UPDATE dbo.SYSTEMINFO SET KEYWORDVALUE ='false' WHERE KEYWORD like '<PASSWORD>';
--Disable schedule load customer function
UPDATE dbo.SCHEDULED_LOAD_CUSTOMER SET SCHEDULE_FREQUENCY = 0;
--Clear skylogix configuration
DELETE FROM dbo.SYSTEMINFO WHERE KEYWORD in ('enable_run_skylogix_export', 'skylogix_client_id', 'skylogix_user_name', 'skylogix_user_password');
--Disable Outgoing Email (Admin > System Settings > Configuration - Internet Staff)
IF EXISTS(SELECT * FROM dbo.SYSTEMINFO WHERE KEYWORD = 'disable_outgoing_email') BEGIN
UPDATE dbo.SYSTEMINFO SET KEYWORDVALUE = 'true' WHERE KEYWORD = 'disable_outgoing_email' and cast(KEYWORDVALUE as varchar(5)) = 'false';
END
ELSE BEGIN
INSERT INTO dbo.SYSTEMINFO (KEYWORD, KEYWORDVALUE) VALUES ('disable_outgoing_email', 'true');
END
--Enable cache control on test sites
IF EXISTS(SELECT * FROM dbo.SYSTEMINFO WHERE KEYWORD = 'enable_cache_control_for_non_prod_site') BEGIN
UPDATE dbo.SYSTEMINFO SET KEYWORDVALUE = 'true' WHERE KEYWORD = 'enable_cache_control_for_non_prod_site' and cast(KEYWORDVALUE as varchar(5)) = 'false';
END
ELSE BEGIN
INSERT INTO dbo.SYSTEMINFO (KEYWORD, KEYWORDVALUE) VALUES ('enable_cache_control_for_non_prod_site', 'true');
END
-- Mark the org as a load-test system.
if not exists (select keyword from systeminfo where keyword = 'is_load_test_system')
begin
insert into systeminfo (keyword,keywordvalue) values ('is_load_test_system','true')
end
-- Allow plain usernames (not emails) as login names for generated accounts.
update systeminfo set keywordvalue = 'false'
where keyword = 'force_email_as_login_name'
-- Make all custom questions optional for scripted flows.
update [CUSTOMQUESTIONS] set required = 0 where required = -1
-- Point backup/image/report paths at the perf environment locations.
update systeminfo set keywordvalue = '/opt/active/ActiveNet/perf/SQLBACKUPS/perf01' where keyword = 'full_backup_unc'
update systeminfo set keywordvalue = 'W:\perf\SQLBACKUPS\perf01' where keyword = 'full_backup_unc_remote'
update systeminfo set keywordvalue = 'X:\perf01' where keyword = 'image_storage_path'
update systeminfo set keywordvalue = '/opt/active/data/an_filedata/perf01' where keyword = 'image_storage_path_local'
update systeminfo set keywordvalue = '/perf01/jreport/' where keyword = 'jreport_jsp_path'
select * from systeminfo where keyword in ('full_backup_unc','full_backup_unc_remote','image_storage_path','image_storage_path_local','jreport_jsp_path')
-- Point the new CUI at the perf APM host.
if not exists (select keywordvalue from systeminfo where KEYWORD = 'new_cui_url')
BEGIN
insert into systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('new_cui_url','https://ancperf.apm.activecommunities.com')
END
else
BEGIN
update systeminfo set KEYWORDVALUE = 'https://ancperf.apm.activecommunities.com' where KEYWORD = 'new_cui_url'
END
-- Turn off the ignite monitor for perf runs.
if not exists (select * from systeminfo where keyword = 'enable_ignite_monitor')
BEGIN
insert into systeminfo (KEYWORD,KEYWORDVALUE) values ('enable_ignite_monitor', 'false')
END
else
BEGIN
update systeminfo set KEYWORDVALUE = 'false' where KEYWORD = 'enable_ignite_monitor'
END
select * from systeminfo where keyword = 'enable_ignite_monitor'<file_sep>/Neoload LG Upgrade/copyNeoLoadinstallFiles.sh
# Push the NeoLoad install bundle to every load-generator host (27..40).
for host_index in $(seq 27 40); do
	scp -r /tmp/Neoloadinstallfile "ajia@qaneolglin${host_index}.dev.activenetwork.com:/tmp/Neoloadinstallfile"
done
<file_sep>/DB_Size_Summary02.sql
-- Summarize the total size (data + log files) of one staging database
-- into a temp table, reported as 'NNN.NN MB'.
create table #DB_Size_Summary
(
	stg_db_name nvarchar(50),
	stg_db_size nvarchar(50)
)
use LSTGApacheJunction
-- (removed @pages and @dbname: they were declared but never used)
declare @dbsize dec(15,0)
declare @logsize dec(15)
declare @bytesperpage dec(15,0)
declare @pagesperMB dec(15,0)
-- Data-file pages: sysfiles rows whose status bit 64 is clear.
select @dbsize = sum(convert(dec(15),size))
from dbo.sysfiles
where (status & 64 = 0)
-- Log-file pages: status bit 64 set.
select @logsize = sum(convert(dec(15),size))
from dbo.sysfiles
where (status & 64 <> 0)
-- Page size in bytes from master..spt_values (type 'E', number 1).
select @bytesperpage = low
from master.dbo.spt_values
where number = 1
and type = 'E'
select @pagesperMB = 1048576 / @bytesperpage
insert into #DB_Size_Summary
select database_name = db_name(),
database_size =
ltrim(str((@dbsize + @logsize) / @pagesperMB,15,2) + ' MB')
select * from #DB_Size_Summary
drop table #DB_Size_Summary<file_sep>/PythonProjects/DecodingF5BigIP/DecodingF5BigIP.py
import struct
import sys
def decode(cookie_value):
    """Decode an F5 BIG-IP persistence cookie value and print "IP : Port".

    The value has the form "<host>.<port>.0000": the host field is the IPv4
    address packed as a little-endian 32-bit integer, and the real port is
    recovered by reading the first two little-endian bytes back big-endian.
    """
    host_part, port_part, _trailer = cookie_value.split('.')
    # Little-endian unpack of the host integer yields the four address octets.
    octets = list(struct.pack("<I", int(host_part)))
    # First two bytes of the little-endian port, recombined big-endian.
    port_bytes = list(struct.pack("<I", int(port_part)))
    real_port = port_bytes[0] * 256 + port_bytes[1]
    print(" IP : Port --> %+3s.%+3s.%+3s.%+3s:%+5s" % (octets[0], octets[1], octets[2], octets[3], real_port))
if __name__ == '__main__':
    # cookie_value = sys.argv[1]
    cookie_value = "110536896.20480.0000"
    # A valid BIG-IP cookie value has exactly three dot-separated fields.
    if len(cookie_value.split('.')) == 3:
        decode(cookie_value)
    else:
        print("Please input the correct bigIP")
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/option/01_DealWithSQLError.sql
-- Clean-up for a refresh error: detach transactions from a stale team.
use acm01vegasjetty
--find team id
select pending_team_owner, * from teams where pending_team_owner is not null and pending_team_owner <> ''
--update transactions table with teamID
-- NOTE(review): the team id in the WHERE clause below is hard-coded; confirm
-- it matches the result of the query above before running.
update [TRANSACTIONS] set team_id = null
where team_id = 25948 <file_sep>/RefreshPerfDB/RefreshSTGDB/13_UpdateCashSummarySheetPrompt.sql
-- All workstation will show the prompt while login
update [WORKSTATIONS] set [CSS_PROMPT_TYPE] = 0 <file_sep>/UsefulJS/GenerateRandomDate.js
// Javascript skeleton.
// Edit and adapt to your needs.
// The documentation of the NeoLoad Javascript API
// is available in the appendix of the documentation.
//
// Picks a random end date 0-30 days in the past and a start date exactly
// 365 days before it, exporting both as yyyy-MM-dd NeoLoad variables.
var endDate = new Date();
//randomdays range 0~30
var randomDays = Math.round(Math.random() * 30);
logger.debug("randomdays :" + randomDays);
endDate.setDate(endDate.getDate() - randomDays);
logger.debug("selectEndDate :" + endDate);
// BUG FIX: start must be 365 days before endDate. The original started from
// "new Date()" (today) but subtracted from endDate.getDate() (a day-of-month),
// which skewed the start by the random offset across month boundaries.
var startDate = new Date(endDate.getTime());
startDate.setDate(startDate.getDate() - 365);
logger.debug("selectStartDate :" + startDate);
// Left-pad a month or day number to two digits.
function pad2(value) {
    return value < 10 ? "0" + value : value;
}
//generate StartDate and EndDate
var proDate_start = startDate.getFullYear() + "-" + pad2(startDate.getMonth() + 1) + "-" + pad2(startDate.getDate());
logger.debug("StartDate :" + proDate_start);
context.variableManager.setValue("StartDate", proDate_start);
var proDate_end = endDate.getFullYear() + "-" + pad2(endDate.getMonth() + 1) + "-" + pad2(endDate.getDate());
logger.debug("EndDate :" + proDate_end);
context.variableManager.setValue("EndDate",proDate_end);<file_sep>/PythonProjects/PerfEnvRelated/LoadGenerator_ChangeHosts.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import logging
import threading
logging.basicConfig(level=logging.CRITICAL, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH, printing stdout lines.

    Parameters:
        host: hostname to connect to (port 22, 5s timeout).
        username, passwd: SSH credentials.
        cmd: iterable of shell command strings, executed one by one.

    Best-effort: any failure is printed, never raised, so threaded callers
    keep running.
    """
    try:
        logger.debug("current server : %r" % (host))
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        try:
            for m in cmd:
                stdin, stdout, stderr = ssh.exec_command(m)
                out = stdout.readlines()
                for o in out:
                    print("%s : %s" % (host, o))
        finally:
            # Original leaked the connection if exec_command raised.
            ssh.close()
    except Exception as e:
        # Bare "except:" also swallowed SystemExit/KeyboardInterrupt;
        # catch Exception and keep the cause available for debugging.
        print('%s\tError\n' % (host))
        logger.debug("failure on %s: %r" % (host, e))
if __name__ == '__main__':
    # sed toggles the ATS entry in /etc/hosts; the check merely greps for it.
    cmd_ATSOff = ["sudo sed -i 's/10.230.49.33/#10.230.49.33/' /etc/hosts;sudo cat /etc/hosts | grep '10.230.49.33' "]  # disable ATS
    cmd_ATSOn = ["sudo sed -i 's/#10.230.49.33/10.230.49.33/' /etc/hosts;sudo cat /etc/hosts | grep '10.230.49.33' "]  # enable ATS
    cmd_CheckATS = ["sudo less /etc/hosts | grep '10.230.49.33' "]
    username = "ajia"  # SSH user
    passwd = "<PASSWORD>xy@an<PASSWORD>"  # SSH password
    print("Begin......")
    logger.debug("Process Start")
    # One worker thread per load-generator host 27..40.
    for index in range(27, 41):
        host = 'qaneolglin%02d.dev.activenetwork.com' % index
        worker = threading.Thread(target=ssh2, args=(host, username, passwd, cmd_CheckATS))
        worker.start()
    logger.debug("Process End")
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/20_AccountRedesign.sql
use ACM01vegasJetty
-- login: turn the new-CUI account flag on (insert if absent, overwrite otherwise)
IF not exists (select keywordvalue from SYSTEMINFO where KEYWORD = 'enable_new_cui_account')
BEGIN
	insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('enable_new_cui_account', 'true')
END
ELSE
BEGIN
	-- FIX: the ELSE branch must target the same keyword the insert branch
	-- creates and write the same value ('true'); the original was garbled.
	update systeminfo set KEYWORDVALUE = 'true' where KEYWORD = 'enable_new_cui_account'
END
select * from dbo.systeminfo where keyword = 'enable_new_cui_account'
--myaccount: same upsert for the my-account redesign flag
IF not exists (select keywordvalue from SYSTEMINFO where KEYWORD = 'myaccount_redesign_on_cui')
BEGIN
	insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('myaccount_redesign_on_cui', 'true')
END
ELSE
BEGIN
	update systeminfo set KEYWORDVALUE = 'true' where KEYWORD = 'myaccount_redesign_on_cui'
END
-- FIX: verify the flag that was just written; the original re-checked
-- 'enable_new_cui_account' (copy/paste bug).
select * from dbo.systeminfo where keyword = 'myaccount_redesign_on_cui'
<file_sep>/RefreshPerfDB/ignite_cache_copies.sql
-- Upsert the ignite cache-copy count for the perf org, pinning it to 1.
use acm01vegas
IF not exists (select keywordvalue from SYSTEMINFO where KEYWORD = 'org_cache_copies')
BEGIN
	-- keyword missing: create it with a single cache copy
	insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('org_cache_copies', 1)
END
ELSE
BEGIN
	UPDATE SYSTEMINFO SET KEYWORDVALUE = 1 where KEYWORD = 'org_cache_copies'
END
select * from SYSTEMINFO where KEYWORD = 'org_cache_copies'<file_sep>/PythonProjects/PerfEnvRelated/RushPool_StopServiceNew.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import re
import requests
import threading
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print the output.

    Parameters:
        host: hostname to connect to (port 22, 5s timeout).
        username, passwd: SSH credentials.
        cmd: iterable of shell command strings, executed one by one.

    Best-effort: failures are printed, not raised, so threaded callers survive.
    """
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        try:
            for m in cmd:
                stdin, stdout, stderr = ssh.exec_command(m)
                out = stdout.readlines()
                for o in out:
                    print("%s service status : %s" % (host, o))
        finally:
            # Original leaked the connection if exec_command raised.
            ssh.close()
    except Exception as e:
        # Bare "except:" also swallowed SystemExit/KeyboardInterrupt.
        print('%s\tError\n' % (host))
def getMode(url):
    """Fetch the ignite metrics page at `url` and return the org cache mode string.

    Raises requests.HTTPError on a bad status, and AttributeError if the
    "This Org CacheMode -> [...]" marker is absent from the page.
    """
    # NOTE(review): assigning DEFAULT_RETRIES here only mutates a module-level
    # default; it does not retrofit retries onto this request -- confirm
    # whether retry behaviour is actually required.
    requests.adapters.DEFAULT_RETRIES = 3
    response = requests.get(url)
    response.raise_for_status()
    # Page contains e.g. "This Org CacheMode -> [REMOTE]"; extract the bracketed value.
    mode = re.search( r"This Org CacheMode -> \[(.*?)\]",response.text).group(1)
    return mode
def _build_hosts(prefix, start, stop):
    """Return hostnames '<prefix>NNw.an.active.tan' for NN in [start, stop), zero-padded to two digits."""
    return ['%s%02dw.an.active.tan' % (prefix, i) for i in range(start, stop)]


def main():
    """Stop the ActiveNet servlet pool (and, in REMOTE cache mode, the ignite cache nodes too).

    Reads the org's cache mode from the ignite metrics page, then runs the
    appropriate stop scripts on every host in parallel threads and waits for
    all of them to finish.
    """
    org_name = "acm01vegasjetty"
    org_url = "https://anperf01.active.com/" + org_name + "/servlet/ignitemetrics.sdi"
    org_mode = getMode(org_url)
    print("org name : %s ; org_mode : %s" % (org_name, org_mode))
    cmd_Servlet = ['cd /opt/active/sites/acm01vegasjetty/ActiveNetServlet/config;./stop_service.sh']
    cmd_Cache = ['cd /opt/active/sites/ignite01/ActiveNetServlet/config;./stop_cache.sh']
    username = "deploy"  # SSH user
    passwd = "<PASSWORD>"  # SSH password
    threads = []
    if org_mode not in ('REMOTE', 'LOCAL'):
        # FIX: typo "vaild" -> "valid" in the original message.
        print("No valid org mode found!")
        return
    if org_mode == 'REMOTE':
        # REMOTE mode also stops the dedicated ignite cache nodes.
        print("Cache......")
        for host in _build_hosts('perf-ignite-', 1, 3):
            threads.append(threading.Thread(target=ssh2, args=(host, username, passwd, cmd_Cache)))
    print("Servlet......")
    for host in _build_hosts('perf-activenet-', 1, 19):
        threads.append(threading.Thread(target=ssh2, args=(host, username, passwd, cmd_Servlet)))
    for t in threads:
        t.start()
    # FIX: the original LOCAL branch started threads without collecting or
    # joining them; now both modes wait for every host before returning.
    for t in threads:
        t.join()


if __name__ == '__main__':
    main()
<file_sep>/Neoload JARs/ActiveNetCUIPerfTools/src/com/activenet/performance/GetReceiptNumber.java
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import java.io.*;
public class GetReceiptNumber {
    // Workbook that receives one (timestamp, receipt number) row per call.
    String ReceiptNumberExcelFile = "C:\\ajia\\AutoNumberCheck.xlsx";
    // String ReceiptNumberExcelFile = "D:\\Work\\AutoNumberCheck.xlsx";

    /**
     * Append a (timeStampMS, receiptNumber) row after the last row of the
     * first sheet of the tracking workbook, then write the workbook back to
     * disk. All errors are printed, never thrown.
     */
    public void PutReceiptNumberIntoExcel(String timeStampMS, String receiptNumber){
        // FIX: use the ReceiptNumberExcelFile field instead of re-hard-coding
        // the path, and close the input stream via try-with-resources
        // (the original leaked the FileInputStream).
        try (FileInputStream inp = new FileInputStream(new File(ReceiptNumberExcelFile))) {
            XSSFWorkbook wb = new XSSFWorkbook(inp);
            XSSFSheet sheet = wb.getSheetAt(0);
            XSSFRow row = sheet.getRow(0);
            // Last used row index and last cell index of the header row.
            System.out.println(sheet.getLastRowNum() + " " + row.getLastCellNum());
            // Append the new record after the current last row.
            row = sheet.createRow(sheet.getLastRowNum() + 1);
            row.createCell(0).setCellValue(timeStampMS);
            row.createCell(1).setCellValue(receiptNumber);
            // FIX: write before flushing (the original flushed an empty
            // stream first), and close the output stream deterministically.
            try (FileOutputStream out = new FileOutputStream(new File(ReceiptNumberExcelFile))) {
                wb.write(out);
                out.flush();
            }
            System.out.println(row.getPhysicalNumberOfCells() + " " + row.getLastCellNum());
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("ok");
    }

    public static void main(String[] args) {
        GetReceiptNumber GRN = new GetReceiptNumber();
        String timeStampMS = "123";
        String receiptNumber = "1755610.001";
        GRN.PutReceiptNumberIntoExcel(timeStampMS, receiptNumber);
    }
}
<file_sep>/PythonProjects/PerfEnvRelated/123.py
import paramiko
import re
from xml.dom.minidom import parse
import xml.dom.minidom
import datetime
import requests
import sys
import os
rootPath = os.path.abspath(os.path.join(os.path.join(os.path.dirname("__file__"), os.path.pardir),os.path.pardir))
sys.path.append(rootPath)
sys.path.append(r'C:\Python_Workspace\ApplicationPerformance')
from Performance_CI.DailyPerf.service_properties import hostNameBuilder,RegressionTemplateConfig
from Performance_CI.DailyPerf.DBHandler import DatabaseHandler, TransactionResponseHistory, Version
from Performance_CI.DailyPerf.JenkinsValidator import Jenkins_Api
from Performance_CI.DailyPerf.neoloadweb import neoloadweb
class ReportComposer():
    """Compose the Endurance daily performance HTML report.

    Parses NeoLoad's report.xml, fills the HTML template with application
    version info, summary statistics and per-transaction response times
    (compared against fixed or database-derived baselines), and records
    versions and results to the history database.
    """
    def __init__(self):
        # file location for debug
        # Jenkins will start this script from workspace as default which makes all the relative path needs to start from here.
        # e.g : C:\Program Files (x86)\Jenkins\workspace\Endurance_Performance_Regression>python C:\Python_Workspace\ApplicationPerformance\Performance_CI\DailyPerf\ComposeReport.py
        # Current Jenkins configuration
        # This script needs to locate
        # C:\Python_Workspace\ApplicationPerformance\Performance_CI\DailyPerf\ComposeReport.py
        # report_file_source
        # C:\Python_Workspace\ApplicationPerformance\Performance_CI\neoload-report\Endurance_Daily_Report_template.html
        # report_xml
        # C:\Program Files (x86)\Jenkins\workspace\Endurance_Daily_Perf_Test\report.xml
        # report_file_gen
        # C:\Program Files (x86)\Jenkins\workspace\Endurance_Daily_Perf_Test\Endurance_Daily_Report.html
        # Report paths are selected by hostname: Jenkins server vs. known dev machines.
        if os.popen('hostname').read().strip() == 'dev-perfqa-02w':
            #file location for jenkins server
            self.report_file_source = "../../../../../Python_Workspace/ApplicationPerformance/Performance_CI/neoload-report/Endurance_Daily_Report_template.html"
            self.report_file_gen = "./neoload-report/Endurance_Daily_Report.html"
            self.report_xml = "./neoload-report/report.xml"
        # TODO(<EMAIL>): set the local machine list in the configuration file
        elif os.popen('hostname').read().strip() == 'che34080w.active.local' or os.popen('hostname').read().strip() == 'Squall-Yang.local' or os.popen('hostname').read().strip() == 'wl000731051.active.local':
            # file location for local
            self.report_file_source = "/Users/squallyang/Documents/Python_WorkSpace/ApplicationPerformance/Performance_CI/neoload-report/Endurance_Daily_Report_template.html"
            self.report_file_gen = "/Users/squallyang/Documents/Python_WorkSpace/ApplicationPerformance/Performance_CI/neoload-report/Endurance_Daily_Report.html"
            self.report_xml = "/Users/squallyang/Documents/Python_WorkSpace/ApplicationPerformance/Performance_CI/neoload-report/report.xml"
        else:
            raise RuntimeError('Could not find the hostname defined, The current machine hostname is '+os.popen('hostname').read().strip())
        # Overall pass/fail flag; set to False by any failing check below.
        self.testStatus = True
        # Parsed NeoLoad report.xml: summary <statistics> and per-virtual-user results.
        self.domtree = xml.dom.minidom.parse(self.report_xml)
        self.root = self.domtree.documentElement
        self.statistics = self.root.getElementsByTagName("statistics")[0]
        self.virtualUser = self.root.getElementsByTagName("virtual-users")[0]

    def test_status(self):
        # Replace the "test_status" placeholder in the generated report with
        # a colored Success/Failed label based on self.testStatus.
        file = open(self.report_file_gen, "r", encoding="utf-8")
        buff = file.read()
        file.close()
        if self.testStatus is True:
            buff = re.sub('<span>test_status</span>', '<span style="color: green;"><strong>Success</strong></span>', buff, 0)
        else:
            buff = re.sub('<span>test_status</span>', '<span style="color: red;"><strong>Failed</strong></span>', buff, 0)
        file = open(self.report_file_gen, "w", encoding="utf-8")
        file.write(buff)
        file.close()

    def ssh2(self,host, username, passwd, cmd):
        # Run every command in cmd on host over SSH; returns the stdout lines
        # of the LAST command only (earlier outputs are overwritten).
        # NOTE(review): the bare except also hides KeyboardInterrupt and masks
        # the original exception -- consider "except Exception as e".
        try:
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(host, 22, username, passwd, timeout=5)
            for m in cmd:
                stdin, stdout, stderr = ssh.exec_command(m)
                out = stdout.readlines()
            ssh.close()
            return out
        except:
            raise RuntimeError('%s\tError\n' % (host))

    # Get App version from given server
    def app_version(self, testset, host, service_name, base, architecture, *args):
        # Fetch the deployed version of service_name, either from a VM
        # (service.properties / jar name) or from Kubernetes (pod image tag),
        # persist it via save_version_to_db, and return it as a string.
        # args[0] is the K8s namespace (unused on the VM path).
        # TODO(<EMAIL>): to save the version info to data base and integrate with API
        namespace = args[0]
        username = "syang4"
        password = "<PASSWORD>"
        if base == 'VM':
            if architecture == 'CoreServer':
                service_properties_file_location = '/opt/active/' + service_name + '/config/service.properties'
                command = [
                    'cat ' + service_properties_file_location + '| grep appVersion=' + "| awk -F '=' '{print $2}' "] # Command list
            elif architecture == 'SpringBoot':
                # Version is embedded in the jar file name: <name>-<x>-<version>.jar
                command = [
                    "ls /opt/active/" + service_name + "/ | grep " + service_name + " | awk -F '-' '{print $3}' | awk -F '.jar' '{print $1}'"] # Command list
            else:
                raise RuntimeError('The architecture setting is incorrect, Please confirm the setting for ' + service_name)
            response = self.ssh2(host, username, password, command)
            version = response[0].replace("\n", "")
            self.save_version_to_db(service_name,version,testset,datetime.datetime.now())
            return version
        elif base == 'K8s':
            if architecture == 'SpringBoot':
                # kubectl runs from the neoloadweb jump host; version is the
                # image tag of the first matching pod.
                host = 'neoloadweb.dev.activenetwork.com'
                command = ["kubectl get pods -n '" + namespace + "' | grep " + service_name + " | awk -F ' ' '{print $1}' | head -n 1" ]
                pod_name = self.ssh2(host, username, password, command)[0].replace("\n", "")
                command = ["kubectl describe pod " + pod_name + " -n '" + namespace + "' | grep artifacts.dev.activenetwork.com | grep " + service_name + " |grep -v 'Image ID' | awk -F ':' '{print $4}'"]
                version = self.ssh2(host, username, password, command)[0].replace("\n", "")
                self.save_version_to_db(service_name, version, testset, datetime.datetime.now())
                return version
        else:
            raise RuntimeError('The base or architecture setting is incorrect, Please confirm the setting for ' + service_name)

    def save_version_to_db(self,service_name,version,testset,create_dt):
        # Persist one (service, version, test set, timestamp) record.
        dbh = DatabaseHandler()
        dbh.save_version(service_name,version,testset,create_dt)

    # Update version information
    def update_app_version_info(self,testSet):
        # Replace the VersionInfoToBeUpdated placeholder in the template with
        # a "<service>: <version><br>" list and return that list. For the
        # ANET daily test the versions are scraped from the staging
        # activenet_version page instead of per-service hosts.
        hb = hostNameBuilder()
        aut = (hb.buildShortNamesByTestSet(testSet))
        if testSet.find("ANET_Daily_Perf_Test") == -1:
            #read the buffer from template and replace the value
            with open(self.report_file_source, "r", encoding="utf-8") as file:
                buff = file.read()
            VersionInfoList = ''
            for serviceShortName in aut:
                # SingleHost tuple: (host, service name, base, architecture, namespace)
                host_name = hb.SingleHost(serviceShortName)[0]
                service_name = hb.SingleHost(serviceShortName)[1]
                base = hb.SingleHost(serviceShortName)[2]
                architecture = hb.SingleHost(serviceShortName)[3]
                namespace = hb.SingleHost(serviceShortName)[4]
                VersionInfo = service_name + ': ' + self.app_version(testSet,host_name,service_name,base,architecture,namespace) + '<br>'
                VersionInfoList = VersionInfoList + VersionInfo
            buff = re.sub('VersionInfoToBeUpdated',VersionInfoList,buff, 0)
            # write the buff back to the file
            with open(self.report_file_gen, "w", encoding="utf-8") as file:
                file.write(buff)
            return VersionInfoList
        else:
            TargetURL = "https://apmstg.activecommunities.com/stgacm01vegas/activenet_version"
            TargetURL_response = requests.get(TargetURL)
            TargetURL_response.raise_for_status()
            #read the buffer from template and replace the value
            with open(self.report_file_source, "r", encoding="utf-8") as file:
                buff = file.read()
            VersionInfoList = ''
            # for serviceShortName in aut:
            # Versions are embedded in the page text, e.g. "Servlet version: 19.08.0.049".
            ServletVersionInfo = re.search(r'Servlet version: \d{2}.\d{2}.\d{1}.\d{3}', TargetURL_response.text).group().split(';')[1]
            AcmCUIVersionInfo = re.search(r'CUI version: \d{2}.\d{2}.\d{1}.\d{3}', TargetURL_response.text).group().split(';')[1]
            NewCUIVersionInfo = re.search(r'<span id=\"newCuiVersionSpan\">\d{2}.\d{2}.\d{1}.\d{3}', TargetURL_response.text).group().split('>')[1]
            VersionInfoList = 'servlet_version: ' + ServletVersionInfo + '<br>' +'acm_cui_version: ' + AcmCUIVersionInfo + '<br>' + 'new_cui_version: ' + NewCUIVersionInfo + '<br>'
            self.save_version_to_db('Servlet version', ServletVersionInfo, testSet, datetime.datetime.now())
            self.save_version_to_db('CUI version', AcmCUIVersionInfo, testSet, datetime.datetime.now())
            self.save_version_to_db('newCuiVersionSpan', NewCUIVersionInfo, testSet, datetime.datetime.now())
            buff = re.sub('VersionInfoToBeUpdated',VersionInfoList,buff, 0)
            # write the buff back to the file
            with open(self.report_file_gen, "w", encoding="utf-8") as file:
                file.write(buff)
            return VersionInfoList

    # Update Summary table
    def update_summary_table(self,testSet):
        # Fill the summary-table placeholders in the generated report from the
        # <statistics> section; more than 10 total errors fails the test and
        # highlights the cell.
        statistic_elements = self.statistics.getElementsByTagName("statistic")
        with open(self.report_file_gen, "r", encoding="utf-8") as file:
            buff = file.read()
        table_summary_template = RegressionTemplateConfig(testSet).SummaryTable
        for statistic_template in table_summary_template:
            for statistic_report in statistic_elements:
                if statistic_report.getAttribute("name") == statistic_template and statistic_report.getAttribute("name") != "total_errors":
                    buff = re.sub(statistic_template, statistic_report.getAttribute('value'), buff, 0)
                elif statistic_report.getAttribute("name") == "total_errors":
                    total_errors = statistic_report.getAttribute("value")
                    # Threshold: up to 10 errors is tolerated.
                    if float(total_errors) <= 10:
                        buff = re.sub('total_errors', total_errors, buff, 0)
                    else:
                        self.testStatus = False
                        buff = re.sub(
                            '<td width="156">\n' + '.*' + '<p>total_errors</p>\n' + '.*' + '</td>',
                            '<td style="width: 156px;background: LightSalmon;"><p>' + total_errors + '</p></td>', buff, 0)
        with open(self.report_file_gen, "w", encoding="utf-8") as file:
            file.write(buff)

    # Update response time in table by given parameters
    def update_response_time_in_cell(self,report_buff, baseline_label, baseline_value, result_label, result_value, passfail_label,test_set):
        # Substitute baseline/result placeholders for one transaction, color
        # the PASS/FAIL cell, and record the result in the history database.
        # Returns the modified report buffer.
        report_buff = re.sub(result_label, str(result_value), report_buff, 0)
        report_buff = re.sub(baseline_label, str(baseline_value), report_buff, 0)
        # Mark as failed if page response time is 0
        if float(result_value) < 0.001:
            self.testStatus = False
            report_buff = re.sub('<td style="width: 90px;">\n' + '.*' + '<p>' + passfail_label + '</p>\n' + '.*' + '</td>',
                                 '<td style="width: 90px;"><p>N/A</p></td>' +
                                 '<td style="width: 90px;background: LightSalmon;"><p>FAIL</p></td>', report_buff, 0)
            return report_buff
        ratio = (float(result_value) - float(baseline_value)) / float(baseline_value)
        dbh = DatabaseHandler()
        # result_label is "<transaction>_avg"; strip the 4-char suffix.
        transaction_name = result_label[0:len(result_label) - 4]
        # Assumption - response time should be increased no more than 30% than baseline.
        if ratio <= 0.3:
            report_buff = re.sub('<td style="width: 90px;">\n' + '.*' + '<p>' + passfail_label + '</p>\n' + '.*' + '</td>',
                                 '<td style="width: 90px;"><p>' + str(round(ratio * 100, 2)) + '%</p></td>' +
                                 '<td style="width: 90px;background: LightGreen;"><p>PASS</p></td>', report_buff, 0)
            dbh.save_transaction_response_history(test_set, transaction_name, result_value, datetime.datetime.now(),'PASS')
        else:
            self.testStatus = False
            report_buff = re.sub('<td style="width: 90px;">\n' + '.*' + '<p>' + passfail_label + '</p>\n' + '.*' + '</td>',
                                 '<td style="width: 90px;"><p>' + str(round(ratio * 100, 2)) + '%</p></td>' +
                                 '<td style="width: 90px;background: LightSalmon;"><p>FAIL</p></td>', report_buff, 0)
            dbh.save_transaction_response_history(test_set, transaction_name, result_value, datetime.datetime.now(), 'FAILED')
        return report_buff

    def get_duplicate_transaction_name_count(self,test_set):
        # Count how many <statistic-item> entries share each templated
        # transaction name; returns {transaction_name: count}.
        statisticItem_element = self.virtualUser.getElementsByTagName("statistic-item")
        transactions_name_template = RegressionTemplateConfig(test_set).Transactions.keys()
        transaction_count = {}
        for transaction_name_template in transactions_name_template:
            for transactions_name_report in statisticItem_element:
                if transactions_name_report.getAttribute('name') == transaction_name_template:
                    if transactions_name_report.getAttribute('name') in transaction_count:
                        transaction_count[transactions_name_report.getAttribute('name')] += 1
                    else:
                        transaction_count[transactions_name_report.getAttribute('name')] = 1
        return transaction_count

    def get_highest_executed_avg(self,transaction_name):
        # Among duplicate entries for transaction_name, return the avg of the
        # one with the most hits.
        # NOTE(review): both lists hold attribute STRINGS, so max() compares
        # hit counts lexicographically (e.g. '9' > '10') -- confirm intended.
        statisticItem_element = self.virtualUser.getElementsByTagName("statistic-item")
        transaction_response_time_list = []
        transaction_execution_count_list = []
        for transactions_name_report in statisticItem_element:
            if transactions_name_report.getAttribute('name') == transaction_name:
                transaction_response_time_list.append(transactions_name_report.getAttribute('avg'))
                transaction_execution_count_list.append(transactions_name_report.getAttribute('hits'))
        return transaction_response_time_list[transaction_execution_count_list.index(max(transaction_execution_count_list))]

    # Update Transaction table template
    def update_transaction_table_template(self,test_set):
        # Expand the TransactionsSummary placeholder into one table row per
        # templated transaction, with _base/_avg/_result placeholders that the
        # later substitution passes fill in.
        with open(self.report_file_gen,'r',encoding='utf-8') as file:
            buff = file.read()
        transactions_name_template = RegressionTemplateConfig(test_set).Transactions
        transaction_entries = ''
        for transaction_name_template in transactions_name_template:
            transaction_entry = '<tr>\n<td style="width: 330px;">\n<p>' + transaction_name_template + '</p>\n</td>\n<td style="width: 90px;">\n<p>' + transaction_name_template + '_base</p>\n</td>\n<td style="width: 90px;">\n<p>' + transaction_name_template + '_avg</p>\n</td>\n<td style="width: 90px;">\n<p>' + transaction_name_template + '_result</p>\n</td>\n</tr>\n'
            transaction_entries = transaction_entries + transaction_entry
        buff = re.sub('TransactionsSummary',transaction_entries,buff, 0)
        # write the buff back to the file
        with open(self.report_file_gen, "w", encoding="utf-8") as file:
            file.write(buff)

    # Update response time matrix
    def update_response_time_in_table(self, test_set, base_line_mode='fix', interval=10):
        '''
        :param test_set: the test set name which could be set in the service-properties.yaml. the test set name
        needs to match the jenkins job name
        :param base_line_mode: can be select for fix mode and dynamic mode, fix mode to take the baseline from
        service-properties.yaml file. dynamic mode to calculate from database
        :param interval: only be used for dynamic mode. used use for define the baseline calculation period by days
        :return:None
        '''
        # NOTE(review): an unknown base_line_mode leaves
        # transactions_name_template unbound and raises NameError below.
        statisticItem_element = self.virtualUser.getElementsByTagName("statistic-item")
        with open(self.report_file_gen, "r", encoding="utf-8") as file:
            buff = file.read()
        if base_line_mode == 'fix':
            transactions_name_template = RegressionTemplateConfig(test_set).Transactions
        elif base_line_mode == 'dynamic':
            dbh = DatabaseHandler()
            transactions_name_template = dbh.get_baseline_from_all_history('"' + test_set + '"', str(interval))
        for transaction_name_template in transactions_name_template:
            for transactions_name_report in statisticItem_element:
                # TODO(<EMAIL>): To calculate baseline from Database instead of hard code.
                if transactions_name_report.getAttribute('name') == transaction_name_template:
                    # Handle duplicated transaction name
                    if self.get_duplicate_transaction_name_count(test_set).get(transaction_name_template) > 1:
                        buff = self.update_response_time_in_cell(buff, transaction_name_template + '_base',
                                                                 transactions_name_template.get(transaction_name_template),
                                                                 transaction_name_template + '_avg',
                                                                 self.get_highest_executed_avg(transaction_name_template),
                                                                 transaction_name_template + '_result',test_set)
                    # Handle transaction name not duplicated
                    elif self.get_duplicate_transaction_name_count(test_set).get(transaction_name_template) == 1:
                        buff = self.update_response_time_in_cell(buff, transaction_name_template + '_base',
                                                                 transactions_name_template.get(transaction_name_template),
                                                                 transaction_name_template + '_avg',
                                                                 transactions_name_report.getAttribute("avg"),
                                                                 transaction_name_template + '_result',test_set)
        with open(self.report_file_gen, "w", encoding="utf-8") as file:
            file.write(buff)

    def compose_report(self):
        # Run the full report pipeline; the test set comes from the last
        # Jenkins build on the CI host, or is hard-coded on dev machines.
        if os.popen('hostname').read().strip() == 'dev-perfqa-02w':
            ja = Jenkins_Api()
            JobName = ja.get_last_build_job_name()
            self.update_app_version_info(JobName)
            self.update_summary_table(JobName)
            self.update_transaction_table_template(JobName)
            self.update_response_time_in_table(JobName, base_line_mode='dynamic')
            self.test_status()
        elif os.popen('hostname').read().strip() == 'che34080w.active.local' or os.popen(
                'hostname').read().strip() == 'Squall-Yang.local' or os.popen('hostname').read().strip() == 'wl000731051.active.local':
            # TODO(<EMAIL>): set the local machine list in the configuration file
            self.update_app_version_info('Endurance_Daily_Perf_Test')
            self.update_summary_table('Endurance_Daily_Perf_Test')
            self.update_transaction_table_template('Endurance_Daily_Perf_Test')
            self.update_response_time_in_table('Endurance_Daily_Perf_Test', base_line_mode='dynamic')
            self.test_status()
        else:
            raise RuntimeError('Could not find the hostname defined, The current machine hostname is ' + os.popen(
                'hostname').read().strip())
if __name__ == '__main__':
    # Build the daily report, then prune expired tests from NeoLoad Web.
    ReportComposer().compose_report()
    web_client = neoloadweb()
    expired_ids = web_client.get_expired_test_id()
    web_client.delete_tests(expired_ids)
<file_sep>/RefreshPerfDB/RefreshSTGDB/0_ChangeDBRecoveryMode.sql
-- Run from master so the ALTER DATABASE that follows can change the
-- staging database's recovery model.
USE [master];
ALTER DATABASE STGACM01vegas SET RECOVERY SIMPLE WITH NO_WAIT;<file_sep>/Neoload JARs/HPS3desEncrypt/src/com/active/hpsperf/CipherCode.java
package com.active.hpsperf;
import javax.crypto.*;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.util.Base64;
import java.util.Base64.Encoder;
import java.util.Base64.Decoder;
public class CipherCode {

    /** Cipher family shared by both directions: Triple DES. */
    private static final String Algorithm = "DESede";
    // private static final String PASSWORD_CRYPT_KEY = "<KEY>";
    /** Seed string for the 8-byte CBC initialisation vector. */
    private static final String Vector = "AF4BD107";

    /**
     * Encrypt {@code src} with 3DES/CBC/PKCS5Padding and return the result
     * Base64-encoded, or {@code null} when encryption fails (the error is
     * printed to stderr).
     */
    public static String encrypt(String src) {
        byte[] cipherBytes = null;
        try {
            SecretKey key = new SecretKeySpec(build3DesKey(), Algorithm);
            Cipher cipher = Cipher.getInstance("DESede/CBC/PKCS5Padding");
            cipher.init(Cipher.ENCRYPT_MODE, key, buildIV(Vector));
            cipherBytes = cipher.doFinal(src.getBytes());
        } catch (Exception e) {
            e.printStackTrace();
        }
        return cipherBytes == null ? null : Base64.getEncoder().encodeToString(cipherBytes);
    }

    /**
     * Decode the Base64 text and decrypt it with 3DES/CBC/PKCS5Padding.
     * Returns {@code null} when decryption fails; malformed Base64 input
     * throws to the caller (decoding happens outside the try block, matching
     * the original contract).
     */
    public static String decrypt(String src) {
        byte[] raw = Base64.getDecoder().decode(src.getBytes());
        String plain = null;
        try {
            SecretKey key = new SecretKeySpec(build3DesKey(), Algorithm);
            Cipher cipher = Cipher.getInstance("DESede/CBC/PKCS5Padding");
            cipher.init(Cipher.DECRYPT_MODE, key, buildIV(Vector));
            plain = new String(cipher.doFinal(raw));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return plain;
    }

    /**
     * Fixed 24-byte 3DES key. Per the original author's note, Java could not
     * reproduce the HPS .NET key derivation directly, so these intermediate
     * bytes were captured from a VB.NET run and hard-coded here.
     */
    private static byte[] build3DesKey() {
        return new byte[] { (byte) 128, (byte) 247, (byte) 62, (byte) 137,
                (byte) 84, (byte) 196, (byte) 218, (byte) 241, (byte) 247,
                (byte) 32, (byte) 239, (byte) 244, (byte) 7, (byte) 137,
                (byte) 25, (byte) 14, (byte) 199, (byte) 4, (byte) 218,
                (byte) 253, (byte) 82, (byte) 227, (byte) 115, (byte) 233 };
    }

    /** Build an 8-byte IV from the seed string (truncated or zero-padded). */
    private static IvParameterSpec buildIV(String sIV) {
        byte[] iv = new byte[8];
        byte[] seed = sIV.getBytes();
        System.arraycopy(seed, 0, iv, 0, Math.min(seed.length, iv.length));
        return new IvParameterSpec(iv);
    }
}
<file_sep>/PythonProjects/PerfEnvRelated/Stage_CheckLogs.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Execute each command in `cmd` on `host` over SSH, printing stdout lines.

    Failures are reported to stdout rather than raised.
    """
    try:
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(host, 22, username, passwd, timeout=5)
        for command in cmd:
            _stdin, stdout, _stderr = client.exec_command(command)
            for line in stdout.readlines():
                print(line)
        client.close()
    except Exception as e:
        print('%s\tError\n:' % (host))
        print(e)
if __name__ == '__main__':
    # Staging load-test orgs whose servlet log sizes we want to sample.
    orgname = ['lstgapachejunction', 'lstgbreckenridgerec', 'lstgcampbellrecreation', 'lstgchandleraz',
               'lstgchesterfieldparksrec', 'lstgcityofcarlsbad', 'lstgcityofcorona', 'lstgcityofdowney',
               'lstgculpepercopandr', 'lstgdenver', 'lstgebparks', 'lstgencinitasparksandrec',
               'lstgfalmouthcommunityprog', 'lstgfpdccrecreation', 'lstggepark', 'lstggjparksandrec', 'lstgindymca',
               'lstgkansascityymca', 'lstglanguagestars', 'lstglbparks', 'lstgmesaaz', 'lstgminneapolisparks',
               'lstgmontgomerycounty', 'lstgmrurecreation', 'lstgnaparec', 'lstgnms', 'lstgnorthshoreymca',
               'lstgomahaconservatory', 'lstgoneteamkids', 'lstgportlandparks', 'lstgrightatschool',
               'lstgsanjoseparksandrec', 'lstgsdparkandrec', 'lstgsfcmprep', 'lstgymcagreaterbrandywine',
               'lstgymcasatx']
    username = "deploy"  # SSH user
    passwd = "<PASSWORD>"  # SSH password
    host1 = "stage-activenet-01w.an.dev.activenetwork.com"
    print("Begin......Servlet")
    for org in orgname:
        print(org)
        # Report the log directory's total block count for this org.
        cmd_line = ("cd /opt/active/ActiveNet/stage/" + org +
                    "/19.08.0.049/stage-activenet-01w.an.dev.activenetwork.com/ActiveNetServlet/logs;"
                    "ls -l| grep total |awk '{print $2}' ")
        ssh2(host1, username, passwd, [cmd_line])
<file_sep>/UsefulJS/Date_Gen.js
// Pick a random 30-day date window in the past year and format both
// endpoints as yyyy-MM-dd for the NeoLoad variables set below.

// Zero-pad a month or day number to two digits.
function formatDate(monthOrDay) {
    return (monthOrDay < 10) ? "0" + monthOrDay : monthOrDay;
}

var startDate = new Date();
var endDate = new Date();

// Random day offset — same distribution as before: round(rand * (max - min + 1) + min).
var min = 1;
var max = 365;
var rndDays = Math.round(Math.random() * (max - min + 1) + min);

// Window: [today - rndDays, today - rndDays + 30].
startDate.setDate(startDate.getDate() - rndDays);
endDate.setDate(endDate.getDate() - rndDays + 30);
logger.debug("startDate = " + startDate);
logger.debug("endDate = " + endDate);

// JS months are 0-based, hence the +1.
var startDate_month = startDate.getMonth() + 1;
var startDate_day = startDate.getDate();
logger.debug("startDate_month = " + startDate_month);
logger.debug("startDate_day = " + startDate_day);
var endDate_month = endDate.getMonth() + 1;
var endDate_day = endDate.getDate();
logger.debug("endDate_month = " + endDate_month);
logger.debug("endDate_day = " + endDate_day);

var startRange = startDate.getFullYear() + "-" + formatDate(startDate_month) + "-" + formatDate(startDate_day);
var endRange = endDate.getFullYear() + "-" + formatDate(endDate_month) + "-" + formatDate(endDate_day);
logger.debug("startRange = " + startRange);
logger.debug("endRange = " + endRange);
// Return the window to NeoLoad (setValue calls below).
context.variableManager.setValue("Start_Date",startRange);
context.variableManager.setValue("End_Date",endRange);<file_sep>/PythonProjects/PerfEnvRelated/FTP_tmp.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import datetime
import threading
import logging
import os
logging.basicConfig(level=logging.CRITICAL, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def upload(host, username, passwd, local, remote, file_type):
    """Upload one file (file_type == 1) or every file in a directory
    (file_type == 2) to ``remote`` on ``host`` over SFTP.

    Args:
        host: SSH host name.
        username: SSH login user.
        passwd: SSH login password.
        local: local file path (type 1) or local directory (type 2).
        remote: remote file path (type 1) or remote directory (type 2).
        file_type: 1 = single file, 2 = whole directory.

    Errors are caught and printed; nothing is raised to the caller.
    """
    trans = None  # BUG FIX: finally used to NameError when Transport() itself failed
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            # BUG FIX: removed the bare `logger.debug()` call here — it raised
            # TypeError (missing message arg) and aborted every single-file upload.
            print(' upload file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.put(local, remote)
            print('upload file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            files = os.listdir(local)
            for f in files:
                print(' upload file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.put(os.path.join(local, f), os.path.join(remote, f))
                print('upload file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        if trans is not None:
            trans.close()
def download(host, username, passwd, local, remote, file_type):
    """Download one file (file_type == 1) or every ``.log`` file in a remote
    directory (file_type == 2) from ``host`` over SFTP.

    Args:
        host: SSH host name.
        username: SSH login user.
        passwd: SSH login password.
        local: local file path (type 1) or local directory (type 2).
        remote: remote file path (type 1) or remote directory (type 2).
        file_type: 1 = single file, 2 = directory (".log" files only).

    Errors are caught and printed; nothing is raised to the caller.
    """
    trans = None  # BUG FIX: finally used to NameError when Transport() itself failed
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' download file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.get(remote, local)
            print('download file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            files = sftp.listdir(remote)
            for f in files:
                # Only pull log files; skip rotated archives and other artifacts.
                if ".log" in f:
                    print(' download file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                    sftp.get(os.path.join(remote, f), os.path.join(local, f))
                    print('download file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        if trans is not None:
            trans.close()
if __name__ == '__main__':
    username = "deploy"  # SSH username
    passwd = "<PASSWORD>"  # SSH password (redacted)
    print("Begin......")
    # Earlier ignite-host loop kept for reference:
    # for i in range(1, 3):
    #
    #     if i < 10:
    #         host = 'perf-ignite-0' + str(i) + 'w.an.active.tan'
    #     else:
    #         host = 'perf-ignite-' + str(i) + 'w.an.active.tan'
    # Remote new-CUI log directory and the local folder the logs land in.
    remote_log_path = '/opt/active/sites/acm01vegasjetty/ActiveNetCUI/logs'
    local_log_path = '/Users/ajia/Documents/tmp/newcuilog'
    # Only host index 1 is fetched; widen the range to cover more CUI nodes.
    for i in range(1, 2):
        # for i in range(20, 21):
        # if i < 10:
        #     host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        # else:
        #     host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        # perf-activenet-cui-01w.an.active.tan
        if i < 10:
            host = 'perf-activenet-cui-0' + str(i) + 'w.an.active.tan'
        else:
            host = 'perf-activenet-cui-' + str(i) + 'w.an.active.tan'
        # print(host)
        download(host, username, passwd, local_log_path, remote_log_path, 2)
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/15_NewCartAddress.sql
use acm01vegasjetty
-- Point this org's new cart at the perf new-CUI front end.
-- Idempotent upsert of the 'new_cui_url' system setting.
if not exists (select keywordvalue from systeminfo where KEYWORD = 'new_cui_url')
BEGIN
insert into systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('new_cui_url','https://ancperf.apm.activecommunities.com')
END
else
BEGIN
update systeminfo set KEYWORDVALUE = 'https://ancperf.apm.activecommunities.com' where KEYWORD = 'new_cui_url'
END
select * from systeminfo where KEYWORD = 'new_cui_url'<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/03_AllowLoginAsCustomer_ID.sql
use acm01vegasjetty
-- Allow customer login as Customer_ID
-- Setting 'force_email_as_login_name' to 'false' lets load scripts log in by ID.
update systeminfo set keywordvalue = 'false'
where keyword = 'force_email_as_login_name'<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/21_OrgCacheCopies.sql
use ACM01vegasJetty
-- Show the current value before the change.
select * from SYSTEMINFO where keyword = 'org_cache_copies'
-- BUG FIX: the original INSERT ran unconditionally, adding a duplicate
-- 'org_cache_copies' row on every rerun. Upsert instead, matching the
-- pattern used by the sibling systeminfo scripts.
if not exists (select keyword from SYSTEMINFO where keyword = 'org_cache_copies')
begin
insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('org_cache_copies', 3)
end
else
begin
update SYSTEMINFO set KEYWORDVALUE = 3 where keyword = 'org_cache_copies'
end
select * from SYSTEMINFO where keyword = 'org_cache_copies'<file_sep>/UsefulJS/DateGenerator.js
// Generate random date between given range
// <NAME>
// July 31,2019
// Publishes a 90-day window ending on a random day within the last ~180 days
// as Start_Date / End_Date (yyyy-MM-dd, UTC).

// Zero-pad a month or day number to two digits.
function formatDate(monthOrDay)
{
    if (monthOrDay < 10) {
        return "0" + monthOrDay;
    } else {
        return monthOrDay;
    }
}

var endDate = new Date();

// Random number of days to step back: [0, 180].
var rndDays = Math.round(Math.random() * 180);
logger.debug("randomdays :" + rndDays);
endDate.setUTCDate(endDate.getUTCDate() - rndDays);
logger.debug("selectEndDate :" + endDate);

// Format the end of the window before deriving the start.
var endDate_month = endDate.getUTCMonth() + 1;
var endDate_day = endDate.getUTCDate();
var endRange = endDate.getUTCFullYear() + "-" + formatDate(endDate_month) + "-" + formatDate(endDate_day);

// BUG FIX: the original did `var startDate = endDate`, which aliases the SAME
// Date object — mutating startDate also mutated endDate, and the script only
// worked because endRange happened to be computed first. Copy the instant.
var startDate = new Date(endDate.getTime());
startDate.setUTCDate(startDate.getUTCDate() - 90);
logger.debug("selectStartDate :" + startDate);

var startDate_month = startDate.getUTCMonth() + 1;
var startDate_day = startDate.getUTCDate();
var startRange = startDate.getUTCFullYear() + "-" + formatDate(startDate_month) + "-" + formatDate(startDate_day);
logger.debug("startRange = " + startRange);
logger.debug("endRange = " + endRange);
// Return value
context.variableManager.setValue("Start_Date",startRange);
context.variableManager.setValue("End_Date",endRange);<file_sep>/UsefulJS/GetPageInfo_JSON.js
// Get Page Info
// Author: <NAME>
// Last modified date: Oct 12,2017
// Extracts headers.page_info.total_page from the JSON response body and
// stores it in the NeoLoad variable "MaxPageNumber" ("<NOT FOUND>" on failure).
var response = context.variableManager.getValue("Response_Body");
if (response == null) {
    context.fail("Variable 'Response_Body' not found");
}
//logger.debug("Response_Body = " + response);
var maxpageNumber = "<NOT FOUND>";
try {
    // BUG FIX: JSON.parse used to run OUTSIDE the try block, so a malformed
    // body threw an uncaught error instead of falling back to "<NOT FOUND>".
    var responseJson = JSON.parse(response);
    maxpageNumber = JSON.stringify(responseJson.headers.page_info.total_page);
    //Inject the computed value in a runtime variable
    context.variableManager.setValue("MaxPageNumber", maxpageNumber);
}
catch (err) {
    context.variableManager.setValue("MaxPageNumber", "<NOT FOUND>");
}
logger.debug("maxpageNumber = " + maxpageNumber);<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/02_AMS_loadTest.sql
use acm01vegasjetty
-- Mark this org as a load-test system (inserted only when the flag is absent).
if not exists (select keyword from systeminfo where keyword = 'is_load_test_system')
begin
insert into systeminfo (keyword,keywordvalue) values ('is_load_test_system','true')
end
---- Need SQL about set AMS load test server etc...
---- Manually update the AMS settings in AUI
<file_sep>/RefreshPerfDB/MobileAPI-Perf01Jetty/8_UpdateSessions.sql
-- Author: <NAME>
-- Last modified: 2018-04-09
-- WARNING: We have to update session from & to date one by one manually or by Neoload scripts.
-- Site_id = 5: Bellevue Family YMCA
use perf01jetty
-- Widen the listed programs' sessions: Mon-Fri weekday pattern, open
-- enrollment caps, and an Apr-Dec 2018 date range, so mobile-API load
-- tests can always enroll.
update DCSESSIONS
set WEEKDAYS = '0111110'
, ENROLLMIN = 1
, ENROLLMAX = 999
, BEGINNINGDATE = '2018-04-01 00:00:00.000'
, ENDINGDATE = '2018-12-30 00:00:00.000'
where dcsession_id in
(select s.dcsession_id from dcprograms as p,[DCPROGRAMSESSIONS] as ps,dcsessions as s, [PROGRAMTYPES] as pt
where p.dcprogram_id = ps.dcprogram_id
and ps.dcsession_id = s.dcsession_id
and s.programtype_id = pt.programtype_id
and p.dcprogram_id in (5983,5984,5988,5989,6116,6060,6061,6062,6063,6070,6071,5992,5993,5994,5995,5996,5997,6121,6127,6128,6129,6130,6131,6132,6133,6134,6135,6136,5436,5437)
and p.site_id =5
)
-- NOTE(review): this UPDATE has no WHERE clause, so it clears the online /
-- signature flags on EVERY row of STAGES — confirm that is intended on perf.
update stages
set showonline = 0
, signatureline = 0
, REQUIRED_BEFORE_COMPLETING_TRANSACTION = 0
, REQUIRE_INITIALS_ONLINE = 0
-- Disable online checklist/signature requirements for the listed programs.
update ATTACHEDCHECKLISTITEMS
set showonline = 0
, signatureline = 0
, REQUIRED_BEFORE_COMPLETING_TRANSACTION = 0
, REQUIRE_INITIALS_ONLINE = 0
, ITEMSIGNEDONLINE = 0
where dcprogram_id in (5983,5984,5988,5989,6116,6060,6061,6062,6063,6070,6071,5992,5993,5994,5995,5996,5997,6121,6127,6128,6129,6130,6131,6132,6133,6134,6135,6136,5436,5437)
-- Verification: list the sessions that were touched.
select DCSESSION_ID,PROGRAMTYPE_ID,SITE_ID, FACILITY_ID, DCSESSIONNAME from DCSESSIONS
where dcsession_id in
(select s.dcsession_id from dcprograms as p,[DCPROGRAMSESSIONS] as ps,dcsessions as s, [PROGRAMTYPES] as pt
where p.dcprogram_id = ps.dcprogram_id
and ps.dcsession_id = s.dcsession_id
and s.programtype_id = pt.programtype_id
and p.dcprogram_id in (5983,5984,5988,5989,6116,6060,6061,6062,6063,6070,6071,5992,5993,5994,5995,5996,5997,6121,6127,6128,6129,6130,6131,6132,6133,6134,6135,6136,5436,5437)
and p.site_id =5
)<file_sep>/Neoload JARs/HPS3desEncrypt/src/com/active/hpsperf/PaymentResponse.java
package com.active.hpsperf;
import org.dom4j.Document;
import org.dom4j.Element;

/**
 * Parsed view of a Heartland payment-gateway XML response.
 * Fields are null when the corresponding element is absent.
 */
public class PaymentResponse {
    public String Message = null;
    public String OtherFields = null;
    public String AuthorizationNumber = null;
    public String WalletId = null;

    /**
     * @param response raw XML response text; parsed via {@link Request#loadXmlFromString}.
     */
    public PaymentResponse(String response) {
        Document responseDoc = Request.loadXmlFromString(response);
        Element rootElement = responseDoc.getRootElement();
        Message = rootElement.elementText("Message");
        OtherFields = rootElement.elementText("OtherFields");
        AuthorizationNumber = rootElement.elementText("AuthorizationNumber");
        // BUG FIX: responses without a <CardInfo> element used to throw an NPE here.
        Element cardInfo = rootElement.element("CardInfo");
        WalletId = (cardInfo == null) ? null : cardInfo.elementText("WalletId");
    }
}
<file_sep>/PythonProjects/PerfEnvRelated/StandardPool_InitTestOrgs.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import sys
import requests
import threading
import logging
import time
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
# logging.disable(logging.CRITICAL)
def getResponse(url):
    # Hit `url` once and print its HTTP status and response time.
    # NOTE(review): `url[18:20]` assumes the IP-based URL layout built in
    # __main__ ("http://10.119.43.1XX:3000/...") and picks the last two host
    # digits — confirm if the URL format ever changes.
    requests.adapters.DEFAULT_RETRIES = 5
    time_start = time.time()
    response = requests.get(url)
    # response.raise_for_status()
    time_end = time.time()
    initial_time = time_end - time_start
    rsc = response.status_code
    serverNo = url[18:20]
    print("Server : %s ; Status_Code : %r ; initial time : %r s ; hosturl : %s" % (serverNo, rsc, initial_time, url))
if __name__ == '__main__' :
    # logging.debug('start of program')
    print("processing...")
    thread = []
    # orgname = ['perf01', 'perf01jetty', 'perf02', 'perf03', 'acm01vegas']
    # base_orgname = "acm01vegas"
    base_orgname = "perf02"
    orgname = []
    # Build perf0201..perf0250 (zero-padded below 10).
    for orgindex in range(1,51):
        if (orgindex < 10):
            orgindex = base_orgname + "0" + str(orgindex);
        else:
            orgindex = base_orgname + str(orgindex)
        orgname.append(orgindex)
    # Warm each org on servers .119 and .120, one request per thread.
    for org in orgname:
        for i in range(19, 21):
            urlstr = "http://10.119.43.1"+str(i)+":3000/"+org+"/servlet/adminlogin.sdi"
            a = threading.Thread(target=getResponse, args=(urlstr,))
            a.start()
        # NOTE(review): orgname.clear() runs INSIDE the `for org in orgname`
        # loop, which empties the list and stops iteration after the first
        # org — confirm whether it should sit outside the loop.
        orgname.clear()
    # logging.debug('end of program')
<file_sep>/UsefulJS/GetRandomDate.js
//GetRandomDate by Andrew
//random date range: months 6..12 of 2017, stored in NeoLoad variable "Randomdate"

// Inclusive random integer in [lo, hi].
function randomInt(lo, hi) {
    return Math.floor(Math.random() * (hi - lo + 1)) + lo;
}

// Zero-pad a month/day number to two digits.
function pad2(n) {
    return n < 10 ? "0" + n : "" + n;
}

var year = 2017;

// BUG FIX: the original used floor(min + random*(max-min)), which can never
// return the upper bound — month 12 and the last day of each month (31 or 30)
// were never generated. randomInt() is inclusive on both ends.
var monthNum = randomInt(6, 12);
logger.debug("Random month is: " + pad2(monthNum));

// Days per month for the possible months (6..12); none have 28/29 days.
var daysInMonth;
switch (monthNum) {
    case 7:
    case 8:
    case 10:
    case 12:
        daysInMonth = 31;
        break;
    case 6:
    case 9:
    case 11:
        daysInMonth = 30;
        break;
    default:
        logger.debug("Error Date inRandom Day");
        daysInMonth = 30;
        break;
}

var dayNum = randomInt(1, daysInMonth);
logger.debug("Random day is: " + pad2(dayNum));

var randomdate = "" + year + "-" + pad2(monthNum) + "-" + pad2(dayNum);
logger.debug("RandomDate is :" + randomdate);
context.variableManager.setValue("Randomdate",randomdate);<file_sep>/RefreshPerfDB/MobileAPI-Perf01Jetty/13_QueryKidsInfo.sql
use perf01jetty
-- Collect the distinct (customer, program) pairs created by recent load-test
-- transactions (IDs above the hard-coded watermarks).
select distinct customer_id,dcprogram_id into #tempCustomers
from transactions
where customer_id > 18000000
and transaction_id > 3872959
-- Join out the customer/kid details needed for the mobile-API test data set.
select tc.customer_id,c.firstname,c.LASTNAME, c.homephone, p.PASSNUMBER , tc.dcprogram_id, ak.alternatekey_id
from #tempCustomers as tc
left join customers as c on tc.customer_id = c.customer_id
left join passes as p on tc.customer_id = p.customer_id
left join CUSTOMER_ALTERNATE_KEYS as ak on ak.customer_id = tc.customer_id<file_sep>/DB_Size_Summary.sql
-- Summarize file size, used space and free space for every database file on
-- the instance, rendered as human-readable MB/GB strings.
IF EXISTS (SELECT * FROM tempdb.sys.all_objects WHERE name like '%#DBSize%')
DROP TABLE DBO.#DBSize
CREATE TABLE #DBSize
(
DBName varchar(100),
DBStatus varchar(20),
Recovery_Model varchar(100),
Database_Path varchar(500),
File_Size float,
Space_Used float,
Free_Space float,
File_Size_String Varchar(100),
Space_Used_String Varchar(100),
Free_Space_String Varchar(100)
)
-- Collect raw sizes (in KB: 8 KB pages * 8.00) from every database.
insert into #DBSize(Dbname, DBStatus, Recovery_Model, Database_Path, file_Size, Space_Used, Free_Space)
exec sp_msforeachdb
'USE [?];
SELECT DB_NAME() AS DbName,
CONVERT(varchar(20),DatabasePropertyEx(''?'',''Status'')) ,
CONVERT(varchar(20),DatabasePropertyEx(''?'',''Recovery'')),
physical_name,
size*8.00 AS File_Size,
FILEPROPERTY(name, ''SpaceUsed'')*8.00 as Space_Used,
size*8.00 - FILEPROPERTY(name, ''SpaceUsed'')*8.00 AS Free_Space
FROM sys.database_files -- where type in 0
'
-- Format the KB figures: anything over ~1000 MB is shown as GB, else MB.
UPDATE #DBSize SET
File_Size_String =
CASE
WHEN File_Size/1024.00 > 1000 THEN Convert(Varchar(50),Convert(decimal(20,2),(File_Size/1024/1024.00))) + ' GB'
ELSE Convert(Varchar(50),Convert(decimal(20,2),(File_Size/1024.00))) + ' MB'
END,
Space_Used_String =
CASE
WHEN Space_Used/1024.00 > 1000 THEN Convert(Varchar(50),Convert(decimal(20,2),(Space_Used/1024/1024.00))) + ' GB'
ELSE Convert(Varchar(50),Convert(decimal(20,2),(Space_Used/1024.00))) + ' MB'
END,
Free_Space_String =
CASE
WHEN Free_Space/1024.00 > 1000 THEN Convert(Varchar(50),Convert(decimal(20,2),(Free_Space/1024/1024.00))) + ' GB'
ELSE Convert(Varchar(50),Convert(decimal(20,2),(Free_Space/1024.00))) + ' MB'
END
-- Largest files first.
SELECT DBName, DBStatus, Recovery_Model, File_Size_String, Space_Used_String, Free_Space_String, Database_Path
FROM #DBSize
ORDER BY File_Size DESC, Space_Used DESC, Free_Space DESC,DBName
--SELECT * FROM #DBSize
IF EXISTS (SELECT * FROM tempdb.sys.all_objects WHERE name like '%#DBSize%')
DROP TABLE #dbsize
<file_sep>/PythonProjects/PerfEnvRelated/Stage_DBQurey.py
def main():
    # Generate a batch of T-SQL statements (one block per stage org database)
    # into a local text file, ready to paste into SSMS. Nothing is executed here.
    DB_Name = ['lstgapachejunction', 'lstgbreckenridgerec', 'lstgcampbellrecreation', 'lstgchandleraz',
               'lstgchesterfieldparksrec', 'lstgcityofcarlsbad', 'lstgcityofcorona', 'lstgcityofdowney',
               'lstgculpepercopandr', 'lstgdenver', 'lstgebparks', 'lstgencinitasparksandrec',
               'lstgfalmouthcommunityprog', 'lstgfpdccrecreation', 'lstggepark', 'lstggjparksandrec', 'lstgindymca',
               'lstgkansascityymca', 'lstglanguagestars', 'lstglbparks', 'lstgmesaaz', 'lstgminneapolisparks',
               'lstgmontgomerycounty', 'lstgmrurecreation', 'lstgnaparec', 'lstgnms', 'lstgnorthshoreymca',
               'lstgomahaconservatory', 'lstgoneteamkids', 'lstgportlandparks', 'lstgrightatschool',
               'lstgsanjoseparksandrec', 'lstgsdparkandrec', 'lstgsfcmprep', 'lstgymcagreaterbrandywine',
               'lstgymcasatx']
    # DB_Name = ['lstgapachejunction', 'lstgymcasatx']
    for db in DB_Name:
        # NOTE(review): the output path is hard-coded to a personal folder and
        # the file is opened in append mode, so reruns keep adding to it.
        with open(r"/Users/ajia/Documents/tmp/STG_OrgCacheCopies_2.txt", 'a+') as qurey:
            # qurey.write("use " + "Activenetsites" + '\n')
            # qurey.write("update ORGS set IS_TAX_EXEMPT = 0 where SITE_URL = " + "'" + db + "'" + '\n')
            # qurey.write('\n')
            qurey.write("use " + db + '\n')
            qurey.write("if not exists (select keywordvalue from systeminfo where KEYWORD = 'org_cache_copies')" + '\n')
            qurey.write("BEGIN" + '\n')
            qurey.write("    insert into systeminfo (KEYWORD, KEYWORDVALUE) values ('org_cache_copies', 3)" + '\n')
            qurey.write("END" + '\n')
            qurey.write("else" + '\n')
            qurey.write("BEGIN" + '\n')
            # qurey.write("    update systeminfo set keywordvalue = 3 where keyword = 'org_cache_copies'" + '\n')
            qurey.write("    delete from systeminfo where keyword = 'org_cache_copies'" + '\n')
            qurey.write("END" + '\n')
            qurey.write("select * from systeminfo where KEYWORD = 'org_cache_copies'" + '\n')
            qurey.write('\n')
    # Earlier one-off generators kept for reference:
    # with open(r"/Users/ajia/Documents/tmp/STG_AddTaxSettings.txt", 'a+') as qurey:
    #     qurey.write("use Activenetsites" + '\n')
    #     qurey.write("select SITE_URL, IS_TAX_EXEMPT from ORGS where SITE_URL in (" )
    #     for db in DB_Name:
    #         qurey.write("\'" + db +"\'")
    #         if db not in 'lstgymcasatx':
    #             qurey.write(", ")
    #     qurey.write(")" +'\n')
    #
    # with open(r"/Users/ajia/Documents/tmp/STG_AddTaxSettings.txt", 'a+') as qurey:
    #     for db in DB_Name:
    #         qurey.write("use " + db + '\n')
    #         qurey.write("select * from systeminfo where keyword = \'tax_service_activated\'" + '\n' )
    #         qurey.write('\n')
if __name__ == '__main__':
main()<file_sep>/PythonProjects/PerfEnvRelated/StandardPool_InitailOrg.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import sys
import requests
import threading
import logging
import time
logging.basicConfig(level=logging.CRITICAL, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# logging.disable(logging.CRITICAL)
def getResponse(url):
    # Hit `url` once and print its HTTP status and response time.
    # NOTE(review): `url[22:24]` assumes the hostname layout built in __main__
    # ("http://perf-activenet-XXw...") and picks the two-digit node number —
    # confirm if the URL format ever changes.
    requests.adapters.DEFAULT_RETRIES = 5
    time_start = time.time()
    response = requests.get(url)
    # response.raise_for_status()
    time_end = time.time()
    initial_time = time_end - time_start
    rsc = response.status_code
    serverNo = url[22:24]
    print("Server : %s ; Status_Code : %r ; initial time : %r s ; hosturl : %s" % (serverNo, rsc, initial_time, url))
if __name__ == '__main__' :
    # logging.debug('start of program')
    logger.debug("start main program")
    logger.debug("processing...")
    thread = []
    # Orgs to warm up on each AUI node (admin login page initializes the org).
    orgname = ['perf01', 'perf01jetty', 'perf02', 'perf03', 'acm01vegas']
    # orgname = ['acm01vegas']
    # orgname = sys.argv[1]
    for org in orgname:
        # Only nodes 19-20 are warmed; widen range(19, 21) to hit more hosts.
        for i in range(19, 21):
            logger.debug("current org name: %r" %(org))
            if (i < 10):
                urlstr = "http://perf-activenet-0"+str(i)+"w.an.active.tan:3000/"+org+"/servlet/adminlogin.sdi"
                # logging.debug('i is '+ str(i) + ' , url is ' + urlstr)
            else:
                urlstr = "http://perf-activenet-"+str(i)+"w.an.active.tan:3000/"+org+"/servlet/adminlogin.sdi"
                # logging.debug('i is ' + str(i) + ' , url is ' + urlstr)
            # One thread per warm-up request so the nodes initialize in parallel.
            a = threading.Thread(target=getResponse, args=(urlstr,))
            a.start()
    logger.debug("end of main")
<file_sep>/PythonProjects/PerfEnvRelated/RushPool_ClearOldVersion.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import sys
import paramiko
import threading
import time
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Errors are reported on stdout instead of being raised so one bad host
    does not stop the calling loop.
    """
    try:
        ssh = paramiko.SSHClient()
        # Auto-accept unknown host keys (internal perf hosts).
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            # stdin.write("Y")  # simple interaction: send 'Y' if prompted
            out = stdout.readlines()
            # Echo command output.
            for o in out:
                #print(o)
                print("%s service status : %s" % (host, o))
            #print('%s\t start service OK\n' %(host))
        ssh.close()
    except Exception as e:
        # BUG FIX: was a bare `except:` — it also swallowed SystemExit /
        # KeyboardInterrupt and hid the actual error message.
        print('%s\tError\n' % (host))
        print(e)
if __name__ == '__main__':
    # Site folders whose old build directories will be removed.
    orgnames = ["acm01vegasjetty", "acm01vegas", "perf01", "perf02", "perf01jetty", "ignite01", "share" ]
    # Glob of the version directories to delete, e.g. every 20.04 build.
    versionString = "20.04*"
    for orgname in orgnames:
        # WARNING: builds an `rm -rf` for the version glob under each org folder.
        cmdString = "cd /opt/active/ActiveNet/perf/" + orgname + ";rm -rf " + versionString
        cmd =[]
        cmd.append(cmdString)  # list of commands to execute
        print("\033[0;37;40m%s\033[0m" % (orgname))
        print(cmd)
        username = "deploy"  # SSH username
        passwd = "<PASSWORD>"  # SSH password (redacted)
        print("Begin......")
        time1 = time.time()
        # Only host index 1 is cleaned; widen the range to cover more AUI nodes.
        for i in range(1, 2):
            if i < 10:
                host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
            else:
                host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
            ssh2(host, username, passwd, cmd)
        time2 = time.time()
        totalTime = time2 - time1
        print("Total time :" + str(totalTime))
        print("Finish......")
<file_sep>/Neoload LG Upgrade/agent.properties
#Agent properties
#Wed Mar 05 23:05:30 CET 2003
#
[Network]
multicast.address=192.168.3.11
multicast.address6=fc00:db20:35b:7399::5:14:15
multicast.port=1359
agent.server.port=7100
connection.ssl=negotiate
#ipfw.dir=@<EMAIL>@
ipfw.dir=/opt/neoload622/tools/ipfw/linux/
socket.network.buffer.size=32768
agent.client.call.timeout=120000
[Polling]
polling.controller.addresses=
polling.controller.port=4569
polling.delay=10000
[LoadGenerator]
lg.home=.
lg.launcher=./jre/bin/java
#lg.launcher.vm.parameters=-server -XX:MaxRAMFraction=2 -Xss228k -Xbootclasspath/p:lib/alpn-boot.jar
lg.launcher.vm.parameters=-server -XX:MaxRAMFraction=2 -Xmx12g -Xss228k -Xbootclasspath/p:lib/alpn-boot.jar
lg.start.delay=20000
[Logs]
collected.lg.logs.size.max=10485760
<file_sep>/PythonProjects/PerfEnvRelated/RushPool_NewCUI_ChangeLogLevel.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import datetime
import threading
import os
def upload(host, username, passwd, local, remote, file_type):
    """Upload one file (file_type == 1) or every file in a directory
    (file_type == 2) to ``remote`` on ``host`` over SFTP.

    Args:
        host: SSH host name.
        username: SSH login user.
        passwd: SSH login password.
        local: local file path (type 1) or local directory (type 2).
        remote: remote file path (type 1) or remote directory (type 2).
        file_type: 1 = single file, 2 = whole directory.

    Errors are caught and printed; nothing is raised to the caller.
    """
    trans = None  # BUG FIX: finally used to NameError when Transport() itself failed
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' upload file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.put(local, remote)
            print('upload file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            files = os.listdir(local)
            for f in files:
                print(' upload file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.put(os.path.join(local, f), os.path.join(remote, f))
                print('upload file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        if trans is not None:
            trans.close()
def download(host, username, passwd, local, remote, file_type):
    """Download one file (file_type == 1) or every file in a remote directory
    (file_type == 2) from ``host`` over SFTP.

    Args:
        host: SSH host name.
        username: SSH login user.
        passwd: SSH login password.
        local: local file path (type 1) or local directory (type 2).
        remote: remote file path (type 1) or remote directory (type 2).
        file_type: 1 = single file, 2 = whole directory.

    Errors are caught and printed; nothing is raised to the caller.
    """
    trans = None  # BUG FIX: finally used to NameError when Transport() itself failed
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' download file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.get(remote, local)
            print('download file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            files = sftp.listdir(remote)
            for f in files:
                print(' download file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.get(os.path.join(remote, f), os.path.join(local, f))
                print('download file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        if trans is not None:
            trans.close()
if __name__ == '__main__':
    username = "deploy"  # SSH username
    passwd = "<PASSWORD>"  # SSH password (redacted)
    # Local copies of the new-CUI service.properties: the downloaded original
    # and the edited version (changed log level) that gets pushed back out.
    local_newcui_serviceproperties_download = '/Users/ajia/Documents/tmp/Settings/NewCUILogLevel/old/service.properties'
    local_newcui_serviceproperties_upload = '/Users/ajia/Documents/tmp/Settings/NewCUILogLevel/new/service.properties'
    remote_newcui_serviceproperties = '/opt/active/sites/acm01vegasjetty/ActiveNetCUI/config/service.properties'
    # Download pass kept for reference — run it first to capture the originals.
    # print("Download files......")
    #
    # for i in range(1, 2):
    #     # for i in range(20, 21):
    #
    #     if i < 10:
    #         host_newcui = 'perf-activenet-cui-0' + str(i) + 'w.an.active.tan'
    #     else:
    #         host_newcui = 'perf-activenet-cui-' + str(i) + 'w.an.active.tan'
    #
    #     print(host_newcui)
    #
    #     # download(host_newcui, username, passwd, local_jettyxml, remote_jettyxml, 1)
    #     download(host_newcui, username, passwd, local_newcui_serviceproperties_download, remote_newcui_serviceproperties, 1)
    print("Upload files......")
    # Push the edited properties file to CUI nodes 1-8.
    for i in range(1, 9):
        # for i in range(20, 21):
        if i < 10:
            host_newcui = 'perf-activenet-cui-0' + str(i) + 'w.an.active.tan'
        else:
            host_newcui = 'perf-activenet-cui-' + str(i) + 'w.an.active.tan'
        print(host_newcui)
        # upload(host_newcui, username, passwd, local_jettyxml_new, remote_jettyxml, 1)
        upload(host_newcui, username, passwd, local_newcui_serviceproperties_upload, remote_newcui_serviceproperties, 1)
<file_sep>/PythonProjects/PerfEnvRelated/CheckCPUType.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Errors are reported on stdout instead of being raised so one bad host
    does not stop the calling loop.
    """
    try:
        ssh = paramiko.SSHClient()
        # Auto-accept unknown host keys (internal perf hosts).
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            # stdin.write("Y")  # simple interaction: send 'Y' if prompted
            out = stdout.readlines()
            # Echo command output.
            for o in out:
                print("%s : %s" % (host, o))
                # print("\033[0;37;40m%s\033[0m : %s" % (host, o))
            #print('%s\t start service OK\n' %(host))
        ssh.close()
    except Exception as e:
        # BUG FIX: was a bare `except:` — it also swallowed SystemExit /
        # KeyboardInterrupt and hid the actual error message.
        print('%s\tError\n' % (host))
        print(e)
if __name__ == '__main__':
    # cmd = ['pwd']  # commands to execute
    cmd_corenumber = ['cat /proc/cpuinfo | grep "processor" | sort | uniq | wc -l']  # logical CPU count
    cmd_CPUtype = ['cat /proc/cpuinfo | grep name | sort | uniq']  # CPU model name
    cmd_OSVersion = ['cat /etc/issue']  # OS release banner
    cmd2 = ['cat /proc/cpuinfo | grep "physical id" | sort | uniq | wc -l;cat /proc/cpuinfo | grep "core id" | sort | uniq | wc -l;cat /proc/cpuinfo | grep "processor" | sort | uniq | wc -l']  # sockets / cores / logical CPUs
    # Reference one-liners:
    # cat /proc/cpuinfo | grep name | sort | uniq                    -> CPU model
    # cat /proc/cpuinfo | grep "physical id"                         -> physical CPU ids (raw)
    # cat /proc/cpuinfo | grep "physical id" | sort | uniq | wc -l   -> physical CPU count (preferred)
    ###
    # The three piped counts in cmd2 are: physical CPUs, cores per package,
    # and logical processors, respectively.
    ###
    username = "deploy"  # SSH username
    passwd = "<PASSWORD>"  # SSH password (redacted)
    threads = []  # worker threads, one per host
    print("Start Checking......")
    # Check the 8 new-CUI nodes; the commented loop below targets AUI nodes instead.
    for i in range(1, 9):
        if i < 10:
            # perf-activenet-cui-01w.an.active.tan
            host1 = 'perf-activenet-cui-0' + str(i) + 'w.an.active.tan'
        else:
            host1 = 'perf-activenet-cui-' + str(i) + 'w.an.active.tan'
        # for i in range(1, 19):
        #
        #     if i < 10:
        #         host1 = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        #     else:
        #         host1 = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        # a = threading.Thread(target=ssh2, args=(host1, username, passwd, cmd_CPUtype))
        a = threading.Thread(target=ssh2, args=(host1, username, passwd, cmd_corenumber))
        a.start()
<file_sep>/Neoload JARs/ActiveNetOpenAPI/src/com/activenet/openapi/MasheryAuthentication.java
package com.activenet.openapi;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.Instant;

/**
 * Builds the Mashery API authentication token:
 * lowercase-hex SHA-256 of (apiKey + sharedSecret + currentEpochSeconds).
 */
public class MasheryAuthentication {

    /** @return lowercase hex SHA-256 digest of {@code message} (UTF-8). */
    private static String sha256Hash(String message) {
        MessageDigest md;
        try {
            md = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            // BUG FIX: the original printed the stack trace and fell through
            // with md == null, turning this into a confusing NPE below.
            // SHA-256 is mandatory on every JVM, so treat absence as fatal.
            throw new IllegalStateException("SHA-256 not available", e);
        }
        md.update(message.getBytes(StandardCharsets.UTF_8));
        byte[] hash = md.digest();
        StringBuilder hexString = new StringBuilder(hash.length * 2);
        for (byte b : hash) {
            // %02x zero-pads single-digit bytes (replaces the manual '0' append).
            hexString.append(String.format("%02x", b));
        }
        return hexString.toString();
    }

    /** @return current Unix time in whole seconds, as a string. */
    private static String getTimestamp() {
        return String.valueOf(Instant.now().getEpochSecond());
    }

    /**
     * @param apiKey       Mashery API key.
     * @param sharedSecret Mashery shared secret.
     * @return 64-char lowercase-hex token valid for the current second.
     */
    public static String getAuthenticationToken(String apiKey, String sharedSecret) {
        String timestamp = getTimestamp();
        return sha256Hash(apiKey + sharedSecret + timestamp);
    }

    /** Manual smoke test; keys are redacted placeholders. */
    public static void main(String[] args) {
        String apiKey = "<KEY>";
        String sharedSecret = "<KEY>";
        System.out.println(getAuthenticationToken(apiKey, sharedSecret));
    }
}
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/22_PCI_Captcha.sql
-- OrgDB setting script: force-disable the PCI captcha for perf testing.
delete from dbo.systeminfo where keyword = 'enable_pci_captcha'
INSERT into dbo.SYSTEMINFO
(KEYWORD, KEYWORDVALUE)
VALUES
('enable_pci_captcha', 'false')
select *
from dbo.systeminfo
where keyword = 'enable_pci_captcha'
-- Idempotent upsert variant of the same org-DB setting.
if not exists (select KEYWORDVALUE from dbo.systeminfo where keyword = 'enable_pci_captcha')
begin
insert into systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('enable_pci_captcha','false')
end
else
begin
update systeminfo set KEYWORDVALUE = 'false' where KEYWORD = 'enable_pci_captcha'
end
select * from dbo.systeminfo where keyword = 'enable_pci_captcha'
-- Sites DB setting (SYSTEMINFO here carries CONFIGURATION_ID and KEYVALUE).
delete from dbo.systeminfo where keyword = 'enable_pci_captcha'
INSERT into dbo.SYSTEMINFO
(CONFIGURATION_ID,KEYWORD, KEYVALUE)
VALUES
(1, 'enable_pci_captcha', 'false')
select *
from dbo.systeminfo
where keyword = 'enable_pci_captcha'
-- NOTE(review): the insert above uses KEYVALUE but this upsert uses
-- KEYWORDVALUE — confirm the Sites-DB column name before running.
if not exists (select KEYWORDVALUE from dbo.systeminfo where keyword = 'enable_pci_captcha')
begin
insert into systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('enable_pci_captcha','false')
end
else
begin
-- BUG FIX: the WHERE keyword was a redacted placeholder ('<PASSWORD>');
-- it must target the same 'enable_pci_captcha' setting as the rest of the script.
update systeminfo set KEYWORDVALUE = 'false' where KEYWORD = 'enable_pci_captcha'
end
select * from dbo.systeminfo where keyword = 'enable_pci_captcha'
<file_sep>/PythonProjects/PerfEnvRelated/BigIPDecoding.py
import struct


def decode_bigip_cookie(cookie_string):
    """Decode an F5 BIG-IP persistence cookie into (server_ip, server_port).

    The cookie has the form '<encoded_ip>.<encoded_port>.0000': the IP is the
    first field packed little-endian ("<I"), one octet per byte; the port is
    the first two bytes of the little-endian packed second field interpreted
    big-endian (p[0]*256 + p[1]).

    Args:
        cookie_string: raw BIG-IP cookie value.

    Returns:
        Tuple of (dotted-quad IP string, port int).
    """
    cookie_ip, cookie_port, _end = cookie_string.split('.')
    ip_bytes = struct.pack("<I", int(cookie_ip))
    server_ip = ".".join(str(b) for b in ip_bytes)
    port_bytes = struct.pack("<I", int(cookie_port))
    server_port = port_bytes[0] * 256 + port_bytes[1]
    return server_ip, server_port


def main():
    """Decode one hard-coded sample cookie and print the backend it maps to."""
    print("staring")
    # 839518730.47873.0000
    # cookie_string = "1401452298.0.0000"
    # cookie_string = "1535670026.0.0000"
    cookie_string = "2217441034.41759.0000"
    # cookie_string = "1703442186.0.0000"
    server_ip, serverport = decode_bigip_cookie(cookie_string)
    print("server_ip : %s ; server_port : %s " % (server_ip, str(serverport)))


if __name__ == '__main__':
    main()
<file_sep>/PythonProjects/PerfEnvRelated/GetOrgIgniteMode.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import re
import time
import logging
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
logging.disable(logging.DEBUG)
def getMode(url):
    """Fetch the ignite metrics page at `url` and return the org cache mode.

    Raises:
        requests.HTTPError: on a non-2xx response (via raise_for_status).
        ValueError: when the page lacks the "This Org CacheMode -> [...]" marker.
    """
    requests.adapters.DEFAULT_RETRIES = 3
    response = requests.get(url)
    response.raise_for_status()
    # print(response.text)
    match = re.search(r"This Org CacheMode -> \[(.*?)\]", response.text)
    if match is None:
        # BUG FIX: the original called .group(1) on the search result directly
        # and crashed with an opaque AttributeError when the marker was missing.
        raise ValueError("CacheMode marker not found in response from " + url)
    return match.group(1)
if __name__ == '__main__' :
    logging.debug('start...')
    thread = []
    # orgname = ['perf01', 'perf01jetty', 'perf02', 'perf03', 'acm01vegas', 'acm01vegasjetty']
    orgname = ['acm01vegasjetty']
    for org in orgname:
        # The ignite metrics servlet reports the org's cache mode in its HTML body.
        urlstr = "https://anperf01.active.com/" + org + "/servlet/ignitemetrics.sdi"
        print("Current ignie mode of " + org +" is " + str(getMode(urlstr,)) )
    logging.debug('end.')
<file_sep>/RefreshPerfDB/MobileAPI-Perf01Jetty/11_SetWaiverAsNotRequired.sql
-- No SQL, do it manually<file_sep>/UsefulJS/Make_ModuleIDs.js
// Build several formatted module-ID lists from the extracted
// ARSummary_ModuleID_ID_* NeoLoad variables and publish them for later requests.
var matchCount = context.variableManager.getValue("ARSummary_ModuleID_ID_matchNr");
if (matchCount == null) {
    context.fail("Variable 'ARSummary_ModuleID_ID' not found");
}

// Collect every extracted ID as a quoted, comma-joined string: "a","b","c"
var Ids = "";
if (matchCount > 0) {
    var quoted = [];
    for (var i = 1; i <= matchCount; i++) {
        quoted.push("\"" + context.variableManager.getValue("ARSummary_ModuleID_ID_" + i) + "\"");
    }
    Ids = quoted.join(",");
    logger.debug("Ids:" + Ids);
}

// Query-string form: ModuleID=a&ModuleID=b&ModuleID=c
var IdList = "ModuleID=" + Ids.replace(/"/g, "").replace(/,/g, "&ModuleID=");
logger.debug("IDs:" + IdList);

// URL-encoded ", "-separated form: a%2C%20b%2C%20c
var IdListEncode = Ids.replace(/"/g, "").replace(/,/g, "%2C%20");
logger.debug("IdListEncode:" + IdListEncode);

// GET-link form with "+" spaces: a%2C+b%2C+c
var IdListGetLink = Ids.replace(/"/g, "").replace(/,/g, "%2C+");
logger.debug("IdListGetLink:" + IdListGetLink);

context.variableManager.setValue("ARSummary_ModuleID_IDs", Ids);
context.variableManager.setValue("ARSummary_ModuleID_List", IdList);
context.variableManager.setValue("ARSummary_ModuleID_Encode", IdListEncode);
context.variableManager.setValue("ARSummary_ModuleID_GetLink",IdListGetLink );<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/16_ActivitySearchRedesign.sql
-- Enable the new CUI activity search redirect for the acm01vegasjetty perf org.
use acm01vegasjetty
-- Upsert the feature flag: insert it if absent, otherwise force it to 'true'.
if not exists (select keywordvalue from systeminfo where KEYWORD = 'enable_new_cui_activity_search_redirect')
BEGIN
insert into systeminfo (KEYWORD,KEYWORDVALUE) VALUES ('enable_new_cui_activity_search_redirect','true')
END
else
BEGIN
update systeminfo set KEYWORDVALUE = 'true' where KEYWORD = 'enable_new_cui_activity_search_redirect'
END
-- Echo the row back so the operator can confirm the change took effect.
select * from systeminfo where KEYWORD = 'enable_new_cui_activity_search_redirect'
<file_sep>/PythonProjects/PerfEnvRelated/yaml_opr.py
import threading
import paramiko
import yaml
import os
import logging
logging.basicConfig(level=logging.CRITICAL, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def yaml_opr():
    """Load env_properties.yaml and print the NODES entry of perf_rushpool_AUI."""
    with open("env_properties.yaml", "r", encoding="utf-8") as fh:
        parsed = yaml.load(fh.read(), Loader=yaml.FullLoader)
    # Show what the loader produced, then the single value we care about.
    print(type(parsed))
    print(parsed['ENV']['perf_rushpool_AUI']['NODES'])
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Args:
        host: host name or IP to connect to on port 22.
        username / passwd: SSH credentials.
        cmd: iterable of shell command strings, executed in order.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            out = stdout.readlines()
            # Echo every remote stdout line, prefixed with the host it came from.
            for o in out:
                print("%s : %s" % (host, o))
    except Exception as e:
        # Was a bare `except:` that hid the failure reason and also swallowed
        # KeyboardInterrupt/SystemExit; report the error like the sibling scripts.
        print('%s\tError\n %r' % (host, e))
    finally:
        ssh.close()  # always release the connection, even on failure
def check_JVM(env):
    """Spawn one SSH thread per node of a perf environment and print the
    ActiveNet java process found on each node.

    Args:
        env: one of 'perf_rushpool_AUI', 'perf_standardpool_AUI',
            'perf_NewCUI', 'perf_ignite'. Any other value prints
            "error env found" and returns.

    Node count, host-name prefix and domain come from env_properties.yaml.
    """
    with open("env_properties.yaml", "r", encoding="utf-8") as file:
        yaml_data = yaml.load(file.read(), Loader=yaml.FullLoader)
    # Per-environment settings: (process pattern to grep for, first node number).
    # The standard pool starts at host 19; the other pools start at 01.
    settings = {
        'perf_rushpool_AUI': ('ActiveNetServlet1', 1),
        'perf_standardpool_AUI': ('ActiveNetServlet1', 19),
        'perf_NewCUI': ('ActiveNetCUI', 1),
        'perf_ignite': ('ActiveNetServlet1', 1),
    }
    if env not in settings:
        print("error env found")
        return
    nodes_amount = yaml_data['ENV'][env]['NODES']
    logger.debug("nodes amount : %r" % (nodes_amount))
    username = "deploy"    # SSH user
    passwd = "<PASSWORD>"  # SSH password (sanitized)
    pattern, first_node = settings[env]
    cmd_jvm = ['ps -ef|grep java|grep -v grep|grep ' + pattern]
    for i in range(first_node, first_node + nodes_amount):
        # Node numbers below 10 are zero padded in the host name (e.g. ...-01w).
        host_name = (yaml_data['ENV'][env]['PREHOST'] + str(i).zfill(2)
                     + yaml_data['ENV'][env]['DOMAIN'])
        logger.debug("host name : %s " % (host_name))
        worker = threading.Thread(target=ssh2, args=(host_name, username, passwd, cmd_jvm))
        worker.start()
if __name__ == '__main__':
    # Known environment names (pass exactly one of these to check_JVM):
    # perf_rushpool_AUI
    # perf_standardpool_AUI
    # perf_NewCUI
    # perf_ignite
    # check_JVM('perf_rushpool_AUI')
    # check_JVM('perf_standardpool_AUI')
    # check_JVM('perf_NewCUI')
    check_JVM('perf_ignite')
<file_sep>/RefreshPerfDB/RefreshSTGDB/1_UpdateEnvironmentInfo.sql
-- Mark this database as the Stage environment (systeminfo flag read by the app).
update systeminfo set keywordvalue = 'Stage'
where keyword = 'environment_identifier'<file_sep>/PythonProjects/PerfEnvRelated/venv/lib/python3.7/site-packages/pypsexec-0.1.0.dist-info/DESCRIPTION.rst
Python PsExec Library
=====================
|License| |Travis Build| |AppVeyor Build| |Coverage|
This library can run commands on a remote Windows host through Python.
This means that it can be run on any host with Python and does not
require any binaries to be present or a specific OS. It uses SMB/RPC to
execute commands in a similar fashion to the popular PsExec tool.
The executable wrapper that is sent to the service is based on the
`PAExec <https://github.com/poweradminllc/PAExec>`__ library. PAExec is
a free, redistributable and open source equivalent to Microsoft’s
`PsExec <https://docs.microsoft.com/en-us/sysinternals/downloads/psexec>`__
application. This program is stored as a binary in this package and is
used to run the remote service and start the process execution.
I would like to thank the developers of Power Admin for creating this
library as it has made this library a lot less complex than what it
would have been.
Features
--------
With pypsexec you can run commands on a remote Windows host like you
would with PsExec. Currently you can use pypsexec to do the following;
- Run as a specific local or domain user or the user
- Run as the local SYSTEM account
- Run as an interactive process
- Specify the session the interactive process should run on
- Specify the run level of the user token, ``highest`` or ``limited``
- Set the priority of the process
- Set a timeout for the remote process
- Send input through the stdin pipe to the running process
- Set the processors the process can run on
Further Info
------------
While this info is not necessary for you to use this library it can help
people understand what is happening under the hood. This library runs
the following steps when running a command;
- Create an SMB connection to the host
- Copies across the PAExec binary to the ``ADMIN$`` share of the remote
host
- Binds the Windows Service Manager to the opened ``IPC$`` tree using
RPC
- Creates and starts a Windows service as the ``SYSTEM`` account to run
the binary copied
- Connect to the PAExec named pipe the service creates
- Sends the process details to the PAExec service through the pipe
- Send a request to the PAExec service to start the process based on
the settings sent
- Connect to the newly spawned process’s stdout, stderr, stdin pipe (if
not interactive or async)
- Read the stdout/stderr pipe until the process is complete
- Get the return code of the new process
- Stop and remove the PAExec service
- Remove the PAExec binary from the ``ADMIN$`` share
- Disconnects from the SMB connection
In the case of a failed process, the PAExec service and binary may not
be removed from the host and may need to be done manually. This is only
the case for a critical error or the cleanup functions not being called.
By default the data being sent to and from the server is encrypted to
stop people listening in on the network from snooping your data.
Unfortunately this uses SMB encryption which was added in the SMB 3.x
dialects so hosts running Windows 7, Server 2008, or Server 2008 R2 will
not work with encryption.
This means that any data sent over the wire on these older versions of
Windows is viewable by anyone reading those packets. Any input or output
of the process comes through these packets so any secrets sent over the
network won’t be encrypted. PAExec tries to reduce this risk by doing a
simple XOR scramble of the settings set in ``run_executable`` so it
isn’t plaintext but it can be decoded by someone who knows the protocol.
Requirements
------------
- Python 2.7, 3.4-3.6
- `smbprotocol <https://github.com/jborean93/smbprotocol>`__
To install pypsexec, simply run
``pip install pypsexec``
This will download the packages that are required and get your
Python environment ready to go.
Out of the box, pypsexec supports authenticating to a Windows host with
NTLM authentication but users in a domain environment can take advantage
of Kerberos authentication as well for added security. Currently the
Windows implementation of the smbprotocol does not support Kerberos auth
but for other platforms you can add support by installing the kerberos
components of ``smbprotocol``;
::
# for Debian/Ubuntu/etc:
sudo apt-get install gcc python-dev libkrb5-dev
pip install smbprotocol[kerberos]
# for RHEL/CentOS/etc:
sudo yum install gcc python-devel krb5-devel krb5-workstation python-devel
pip install smbprotocol[kerberos]
From there to check that everything was installed correctly and the
correct GSSAPI extensions are available on that host, run
::
try:
from gssapi.raw import inquire_sec_context_by_oid
print("python-gssapi extension is available")
except ImportError as exc:
print("python-gssapi extension is not available: %s" % str(exc))
If it isn’t available, then either a newer version of the system’s
gssapi implementation needs to be setup and python-gssapi compiled
against that newer version.
Remote Host Requirements
------------------------
The goal of this package to be able to run executables on a vanilla
remote Windows host with as little setup as possible. Unfortunately
there is still some setup required to get working depending on the OS
version and type that is being used. What pypsexec requires on the host
is;
- SMB to be up and running on the Windows port and readable from the
Python host
- The ``ADMIN$`` share to be enabled with read/write access of the user
configured
- The above usually means the configured user is an administrator of
the Windows host
- At least SMB 2 on the host (Server 2008 and newer)
Firewall Setup
~~~~~~~~~~~~~~
By default, Windows blocks the SMB port 445 and it needs to be opened up
before pypsexec can connect to the host. To do this run either one of
the following commands;
::
# PowerShell (Windows 8 and Server 2012 or Newer)
Set-NetFirewallRule -Name FPS-SMB-In-TCP -Enabled True
# CMD (All OS's)
netsh advfirewall firewall set rule name="File and Printer Sharing (SMB-In)" dir=in new enable=Yes
This will open up inbound traffic to port ``445`` which is used by SMB.
User Account Control
~~~~~~~~~~~~~~~~~~~~
On the desktop variants of Windows (7, 8, 10), UAC is enabled by default
and is set to filter a network logon of a local account of their
Administrative rights. Unfortunately pypsexec requires these rights to
both copy the executable to the ``ADMIN$`` share as well as create the
PAExec service on the host. With the default setting it will receive an
``ACCESS_IS_DENIED`` response when attempting either of the 2 as it’s
token does not have Administrative rights.
To get it working on these OS’, either configure UAC to not filter local
account tokens from a network logon or disable UAC entirely. Disabling
UAC is definitely an extreme step and should be avoided if possible but
disabling local token filtering means any network logons of an
Administrator account now gets the full rights of that user. To disable
local token filter run the following;
::
$reg_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System"
$reg_prop_name = "LocalAccountTokenFilterPolicy"
$reg_key = Get-Item -Path $reg_path
$reg_prop = $reg_key.GetValue($reg_prop_name)
if ($null -ne $reg_prop) {
Remove-ItemProperty -Path $reg_path -Name $reg_prop_name
}
New-ItemProperty -Path $reg_path -Name $reg_prop_name -Value 1 -PropertyType DWord
To disable UAC entirely, run the following;
::
$reg_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System"
$reg_prop_name = "EnableLUA"
$reg_key = Get-Item -Path $reg_path
$reg_prop = $reg_key.GetValue($reg_prop_name)
if ($null -ne $reg_prop) {
Remove-ItemProperty -Path $reg_path -Name $reg_prop_name
}
New-ItemProperty -Path $reg_path -Name $reg_prop_name -Value 0 -PropertyType DWord
After running either of these scripts, the Windows host needs to be
rebooted before the policies are enacted.
Examples
--------
Here is an example of how to run a command with this library
::
from pypsexec.client import Client
# creates an encrypted connection to the host with the username and password
c = Client("hostname", username="username", password="<PASSWORD>")
# set encrypt=False for Windows 7, Server 2008
c = Client("hostname", username="username", password="<PASSWORD>", encrypt=False)
# if Kerberos is available, this will use the default credentials in the
# credential cache
c = Client("hostname")
# you can also tell it to use a specific Kerberos principal in the cache
# without a password
c = Client("hostname", username="<EMAIL>@<EMAIL>")
c.connect()
try:
c.create_service()
# After creating the service, you can run multiple exe's without
# reconnecting
# run a simple cmd.exe program with arguments
stdout, stderr, rc = c.run_executable("cmd.exe",
arguments="/c echo Hello World")
# run whoami.exe as the SYSTEM account
stdout, stderr, rc = c.run_executable("whoami.exe", use_system_account=True)
# run command asynchronously (in background), the rc is the PID of the spawned service
stdout, stderr, rc = c.run_executable("longrunning.exe",
arguments="/s other args",
asynchronous=True)
# run whoami.exe as a specific user
stdout, stderr, rc = c.run_executable("whoami",
arguments="/all",
username="local-user",
password="<PASSWORD>",
run_elevated=True)
finally:
c.remove_service()
c.disconnect()
In the case of a fatal failure, this project may leave behind some the
PAExec payload in ``C:\Windows`` or the service still installed. As
these are uniquely named they can build up over time. They can be
manually removed but you can also use pypsexec to cleanup them all up at
once. To do this run
::
from pypsexec.client import Client
c = Client("server", username="username", password="<PASSWORD>")
c.connect()
c.cleanup() # this is where the magic happens
c.disconnect()
The script will delete any files that match ``C:\Windows\PAExec-*`` and
any services that match ``PAExec-*``. For an individual run, the
``remove_service()`` function should still be used.
Client Options
~~~~~~~~~~~~~~
When creating the main pypsexec ``Client`` object there are some
configuration options that can be set to control the process. These args
are;
- ``server``: This needs to be set and is the host or IP address of the
server to connect to
- ``username``: The username to connect with. Can be ``None`` if
``python-gssapi`` is installed and a ticket has been granted in the
local credential cache
- ``password``: The password for ``username``. Can be ``None`` if
``python-gssapi`` is installed and a ticket has been granted for the
user specified
- ``port``: Override the default port of ``445`` when connecting to the
server
- ``encrypt``: Whether to encrypt the messages or not, default is
``True``. Server 2008, 2008 R2 and Windows 7 hosts do not support SMB
Encryption and need this to be set to ``False``
Run Executable Options
~~~~~~~~~~~~~~~~~~~~~~
When calling ``run_executable``, there are multiple kwargs that can
define how the remote process will work. These args are;
- ``executable``: (string) The path to the executable to be run
- ``arguments``: (string) Arguments for the executable
- ``processors``: (list) A list of processor numbers that the process
can run on
- ``asynchronous``: (bool) Doesn’t wait until the process is complete
before returning. The ``rc`` returned by the function is the ``PID``
of the async process, default is ``False``
- ``load_profile``: (bool) Load the user’s profile, default is ``True``
- ``interactive_session``: (int) The session ID to display the
interactive process when ``interactive=True``, default is ``0``
- ``interactive``: (bool) Runs the process as an interactive process.
The stdout and stderr buffers will be ``None`` if ``True``, default
``False``
- ``run_elevated``: (bool) When ``username`` is defined, will elevated
permissions, default ``False``
- ``run_limited``: (bool) When ``username`` is defined, will run the
process under limited permissions, default ``False``
- ``username``: (string) Used to run the process under a different user
than the one that authenticated the SMB session
- ``password``: (string) The password for ``username``
- ``use_system_account``: (bool) Run the process as
``NT AUTHORITY\SYSTEM``
- ``working_dir``: (string) The working directory of the process,
default ``C:\Windows\System32``
- ``show_ui_on_win_logon``: (bool) Displays the UI on the Winlogon
secure desktop when ``use_system_account=True``, default ``False``
- ``priority``: (pypsexec.ProcessPriority) The priority level of the
process, default ``NORMAL_PRIORITY_CLASS``
- ``remote_log_path``: (string) A path on the remote host to log the
PAExec service details
- ``timeout_seconds``: (int) The maximum time the process can run for,
default is ``0`` (no timeout)
- ``stdout``: (pipe.OutputPipe) A class that implements pipe.OutputPipe
that controls how the stdout output is processed and returned, will
default to returning the byte string of the stdout. Is ignored when
``interactive=True`` and ``asynchronous=True``
- ``stderr``: (pipe.OutputPipe) A class that implements pipe.OutputPipe
that controls how the stderr output is processed and returned, will
default to returning the byte string of the stderr. Is ignored when
``interactive=True`` and ``asynchronous=True``
- ``stdin``: (bytes/generator) A byte string or generator that yields a
byte string to send over the stdin pipe, does not work with
``interactive=True`` and ``asynchronous=True``
Logging
-------
This library uses the builtin Python logging library and can be used to
find out what is happening in the pypsexec process. Log messages are
logged to the ``pypsexec`` named logger as well as ``pypsexec.*`` where
``*`` is each python script in the ``pypsexec`` directory.
A way to enable the logging in your scripts through code is to add the
following to the top of the script being used;
::
import logging
logger = logging.getLogger("pypsexec")
logger.setLevel(logging.DEBUG) # set to logging.INFO if you don't want DEBUG logs
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - '
'%(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
These logs are generally useful when debugging issues as they give you a
more step by step snapshot of what it is doing and what may be going
wrong. The debug level will also print out a human readable string of
each SMB packet that is sent out from the client but this level can get
really verbose.
Testing
-------
To test this module, you need to install some pre-requisites first. This can
be done by running;
::
pip install -r requirements-test.txt
# you can also run tox by installing tox
pip install tox
From there to run the basic tests run;
::
py.test -v --pep8 --cov pypsexec --cov-report term-missing
    # or with tox 2.7, 3.4, 3.5, and 3.6
tox
There are extra tests that only run when certain environment variables
are set. To run these tests set the following variables;
- ``PYPSEXEC_SERVER``: The hostname or IP to a Windows host
- ``PYPSEXEC_USERNAME``: The username to use authenticate with
- ``PYPSEXEC_PASSWORD``: The password for ``PYPSEXEC_USERNAME``
From there, you can just run ``tox`` or ``py.test`` with these
environment variables to run the integration tests.
Future
------
Some things I would be interested in looking at adding in the future
would be
- Add a Python script that can be called to run adhoc commands like
``PsExec.exe``
.. |License| image:: https://img.shields.io/badge/license-MIT-blue.svg
:target: https://github.com/jborean93/pypsexec/blob/master/LICENSE
.. |Travis Build| image:: https://travis-ci.org/jborean93/pypsexec.svg
:target: https://travis-ci.org/jborean93/pypsexec
.. |AppVeyor Build| image:: https://ci.appveyor.com/api/projects/status/github/jborean93/pypsexec?svg=true
:target: https://ci.appveyor.com/project/jborean93/pypsexec
.. |Coverage| image:: https://coveralls.io/repos/jborean93/pypsexec/badge.svg
:target: https://coveralls.io/r/jborean93/pypsexec
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/19_FacilitySearchRedesign.sql
use ACM01vegasJetty
-- Upsert: enable the CUI reservation -> new facility management redirect.
if not exists (select KEYWORDVALUE from systeminfo where keyword = 'enable_new_cui_reservation_redirect')
begin
Insert into SYSTEMINFO(KEYWORD, KEYWORDVALUE) values ('enable_new_cui_reservation_redirect', 'true')
end
else
begin
update systeminfo set KEYWORDVALUE = 'true' where keyword = 'enable_new_cui_reservation_redirect'
end
select * from systeminfo where keyword = 'enable_new_cui_reservation_redirect'
--check two license options "facility" "CUI Reservation to New Facility management module"
-- Upsert: resource-search "skip unavailable" flag.
-- NOTE(review): the insert writes 'false' but the update forces 'true', so a
-- fresh row and a pre-existing row end up with different values -- confirm
-- which value is actually intended.
if not exists (select KEYWORDVALUE from systeminfo where KEYWORD = 'online_reservation_resource_search_skip_unavailable')
begin
insert into systeminfo (KEYWORD, KEYWORDVALUE) values ('online_reservation_resource_search_skip_unavailable', 'false')
end
else
begin
update SYSTEMINFO set KEYWORDVALUE = 'true' where KEYWORD = 'online_reservation_resource_search_skip_unavailable'
end
select * from systeminfo where keyword = 'online_reservation_resource_search_skip_unavailable'
-- Upsert: resource-search timeout, in seconds (30).
if not exists (select KEYWORDVALUE from systeminfo where KEYWORD = 'online_reservation_resource_search_timeout_seconds')
begin
insert into systeminfo (KEYWORD, KEYWORDVALUE) values ('online_reservation_resource_search_timeout_seconds', 30)
end
else
begin
update SYSTEMINFO set KEYWORDVALUE = 30 where KEYWORD = 'online_reservation_resource_search_timeout_seconds'
end
select * from systeminfo where keyword = 'online_reservation_resource_search_timeout_seconds'
-- Upsert: resource-search "skip unavailable" threshold (100).
if not exists (select KEYWORDVALUE from systeminfo where KEYWORD = 'online_reservation_resource_search_skip_unavailable_threshold')
begin
insert into systeminfo (KEYWORD, KEYWORDVALUE) values ('online_reservation_resource_search_skip_unavailable_threshold', 100)
end
else
begin
update SYSTEMINFO set KEYWORDVALUE = 100 where KEYWORD = 'online_reservation_resource_search_skip_unavailable_threshold'
end
select * from systeminfo where keyword = 'online_reservation_resource_search_skip_unavailable_threshold'<file_sep>/MembershipUsageAPI_SQL.sql
-- Membership usage records joined to their customer and (when present) the
-- membership/package they were booked against. NOLOCK hints are used so the
-- report does not block writers; results may be dirty reads.
select mu.MEMBERSHIPUSAGE_ID,mu.MEMBERSHIP_ID,mu.CUSTOMER_ID,m.PACKAGE_ID, mu.TIMEUSED
from MEMBERSHIP_USAGES mu WITH(NOLOCK)
join CUSTOMERS c WITH(NOLOCK) on mu.CUSTOMER_ID = c.CUSTOMER_ID
-- left join: a usage row may have no matching membership, keep it anyway.
left join MEMBERSHIPS m WITH(NOLOCK) on mu.MEMBERSHIP_ID = m.MEMBERSHIP_ID
where mu.AUTHORIZED = -1 and mu.VOIDEDBY = 0<file_sep>/PythonProjects/PerfEnvRelated/StandardPool_CheckJVM.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Args:
        host: host name or IP to connect to on port 22.
        username / passwd: SSH credentials.
        cmd: iterable of shell command strings, executed in order.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            out = stdout.readlines()
            # Echo every remote stdout line, prefixed with the host it came from.
            for o in out:
                print("%s : %s" % (host, o))
    except Exception as e:
        print('%s\tError\n %r' % (host, e))
    finally:
        # Previously the connection was only closed on the success path,
        # leaking it whenever connect/exec failed.
        ssh.close()
if __name__ == '__main__':
    # Command lists to run remotely (one grep per JVM of interest).
    cmd_servlet = ['ps -ef|grep java|grep -v grep|grep ActiveNetServlet1']  # commands to run (servlet nodes)
    cmd_cache = ['ps -ef|grep java|grep -v grep|grep ActiveNetServlet1']  # commands to run (cache nodes)
    # Extracts the org name from the servlet java command line (not used by default).
    cmd_getorgname = [
        'ps -ef|grep java|grep -v grep|grep ActiveNetServlet1 | awk \'{print $13}\' | awk -F \'/\' \'{print $5}\'']
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (sanitized)
    threads = []  # thread handles (currently unused)
    print("Begin......Servlet")
    # Standard-pool servlet hosts are numbered 19-20; pad numbers below 10 with a zero.
    for i in range(19, 21):
        if i < 10:
            host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        else:
            host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        # a = threading.Thread(target=ssh2, args=(host, username, passwd, cmd_getorgname))
        a = threading.Thread(target=ssh2, args=(host, username, passwd, cmd_servlet))
        a.start()
    # for i in range(1, 3):
    #
    #     if i < 10:
    #         host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
    #     else:
    #         host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
    #
    #     c = threading.Thread(target=ssh2, args=(host, username, passwd, cmd))
    #     c.start()
    print("Begin......Cache")
    # Ignite cache host 03 only.
    for i in range(3, 4):
        if i < 10:
            host = 'perf-ignite-0' + str(i) + 'w.an.active.tan'
        else:
            host = 'perf-ignite-' + str(i) + 'w.an.active.tan'
        b = threading.Thread(target=ssh2, args=(host, username, passwd, cmd_cache))
        b.start()
<file_sep>/PythonProjects/PerfEnvRelated/StagePool_ClearOldVersion.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import sys
import paramiko
import threading
import time
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Args:
        host: host name or IP to connect to on port 22.
        username / passwd: SSH credentials.
        cmd: iterable of shell command strings, executed in order.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            out = stdout.readlines()
            # Echo every remote stdout line, prefixed with the host it came from.
            for o in out:
                print("%s service status : %s" % (host, o))
    except Exception as e:
        # Was a bare `except:` that hid the failure reason and also swallowed
        # KeyboardInterrupt/SystemExit; report the error like the sibling scripts.
        print('%s\tError\n %r' % (host, e))
    finally:
        ssh.close()  # always release the connection, even on failure
if __name__ == '__main__':
    # Stage orgs whose old deployment version folders should be purged.
    orgnames = ["lstgapachejunction", "lstgbreckenridgerec", "lstgcampbellrecreation", "lstgchandleraz", "lstgchesterfieldparksrec",
                "lstgcityofcarlsbad", "lstgcityofcorona", "lstgcityofdowney", "lstgculpepercopandr", "lstgdenver",
                "lstgebparks", "lstgencinitasparksandrec", "lstgfalmouthcommunityprog", "lstgfpdccrecreation", "lstggepark",
                "lstggjparksandrec", "lstgindymca", "lstgkansascityymca", "lstglanguagestars", "lstglbparks",
                "lstgmesaaz", "lstgminneapolisparks", "lstgmontgomerycounty", "lstgmrurecreation", "lstgnaparec",
                "lstgnms", "lstgnorthshoreymca", "lstgomahaconservatory", "lstgoneteamkids", "lstgportlandparks",
                "lstgrightatschool", "lstgsanjoseparksandrec", "lstgsdparkandrec", "lstgsfcmprep", "lstgymcagreaterbrandywine",
                "lstgymcasatx"]
    versionString = "19.02*"  # glob of the version folders to delete
    # Build ONE command list with a cleanup command per org.
    # BUG FIX: the previous code re-created `cmd` on every loop iteration, so
    # only the LAST org's command ever reached ssh2 and all the other orgs
    # were silently never cleaned up.
    cmd = []
    for orgname in orgnames:
        cmdString = "cd /opt/active/ActiveNet/stage/" + orgname + ";rm -rf " + versionString
        cmd.append(cmdString)
        print(orgname)
        print(cmdString)
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (sanitized)
    print("Begin......")
    time1 = time.time()
    # Stage host 01 only; numbers below 10 are zero padded in the host name.
    for i in range(1, 2):
        if i < 10:
            host = 'stage-activenet-0' + str(i) + 'w.an.dev.activenetwork.com'
        else:
            host = 'stage-activenet-' + str(i) + 'w.an.dev.activenetwork.com'
        ssh2(host, username, passwd, cmd)
    time2 = time.time()
    totalTime = time2 - time1
    print("Total time :" + str(totalTime))
    print("Finish......")
<file_sep>/Neoload JARs/HPS3desEncrypt/src/com/active/hpsperf/test.java
package com.active.hpsperf;
public class test {
public static void main(String[] args) {
//String commit = "<KEY>;
//String commit = "<KEY>";
//System.out.println(CipherCode.decrypt(commit));
CommitRequestTest();
}
public static void PaymentRequestTest() {
Request pr = new Request();
String amsCipherText = "<KEY>;
String Modulus = "81649667d62b8309b1fbad7366949<KEY>eb";
String RequestTicket = "9889";
String paymentRequest = pr.getPaymentRequest(amsCipherText, Modulus,
RequestTicket);
System.out.println(paymentRequest);
System.out.println("Decrypt:");
System.out.println(CipherCode.decrypt(paymentRequest));
}
public static void CommitRequestTest() {
String amsCipherText = "<KEY>;
String Modulus = "<KEY>";
String response = "<KEY>";
String RequestTicket = "9889";
Request pr = new Request();
System.out.println(CipherCode.decrypt(pr.getCommitRequest(
amsCipherText, Modulus, response, RequestTicket)));
}
public static void getPaymentResponseTest() {
String response = "<KEY>";
PaymentResponse pr = new PaymentResponse(response);
System.out.println(pr.AuthorizationNumber);
System.out.println(pr.Message);
System.out.println(pr.OtherFields);
System.out.println(pr.WalletId);
}
}
<file_sep>/PythonProjects/PerfEnvRelated/RushPool_checkJVM.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import threading
def ssh2(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH and print its stdout.

    Args:
        host: host name or IP to connect to on port 22.
        username / passwd: SSH credentials.
        cmd: iterable of shell command strings, executed in order.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            out = stdout.readlines()
            # Echo every remote stdout line, prefixed with the host it came from.
            for o in out:
                print("%s : %s" % (host, o))
    except Exception as e:
        # Was a bare `except:` that hid the failure reason and also swallowed
        # KeyboardInterrupt/SystemExit; report the error like the sibling scripts.
        print('%s\tError\n %r' % (host, e))
    finally:
        ssh.close()  # always release the connection, even on failure
if __name__ == '__main__':
    # cmd = ['pwd']  # commands to run
    cmd_jvm = ['ps -ef|grep java|grep -v grep|grep ActiveNetServlet1']  # commands to run
    cmd_ip = ['curl bot.whatismyipaddress.com']  # commands to run (report public IP)
    # Extracts the deployed version folder name from the servlet java command line.
    cmd = ['ps -ef |grep java | grep -v grep | grep ActiveNetServlet | awk \'{print $14}\' | awk -F \'/\' \'{print $7}\'']  # commands to run
    # cat /proc/cpuinfo | grep name | cut -f2 -d: | uniq -c
    # cmd = ['ps -ef|grep java|grep -v grep']  # commands to run
    # cmd = ['ps -ef|grep java']  # commands to run
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (sanitized)
    threads = []  # thread handles (currently unused)
    print("Begin......Servlet")
    # Rush-pool servlet hosts 01-18; numbers below 10 are zero padded.
    for i in range(1, 19):
        if i < 10:
            host1 = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        else:
            host1 = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        a = threading.Thread(target=ssh2, args=(host1, username, passwd, cmd))
        a.start()
    print("Begin......Cache")
    # Ignite cache hosts 01-02.
    for i in range(1, 3):
        if i < 10:
            host2 = 'perf-ignite-0' + str(i) + 'w.an.active.tan'
        else:
            host2 = 'perf-ignite-' + str(i) + 'w.an.active.tan'
        b = threading.Thread(target=ssh2, args=(host2, username, passwd, cmd))
        b.start()
<file_sep>/PythonProjects/PerfEnvRelated/upload.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import datetime
import threading
import os
def upload(host, username, passwd, src, des):
    """Copy the single local file `src` to `des` on `host` over SFTP.

    Args:
        host: host name or IP to connect to on port 22.
        username / passwd: SSH credentials.
        src: local file path.
        des: remote destination file path.
    """
    trans = None  # so the finally block is safe if Transport() itself fails
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        print('upload file on %s start %s ' % (host, datetime.datetime.now()))
        sftp.put(src, des)
        print('upload file on %s end %s ' % (host, datetime.datetime.now()))
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ErrorLog-----------")
        print(e)
    finally:
        # Previously the transport was only closed on success, leaking the
        # connection whenever connect/put failed.
        if trans is not None:
            trans.close()
if __name__ == '__main__':
    username = "deploy"  # SSH user name
    passwd = "<PASSWORD>"  # SSH password (sanitized)
    print("servlet service properties -- ")
    # Push the shared service.properties to each servlet node.
    ServiceProperties_servlet_srcfile = "/Users/ajia/Documents/tmp/Settings/ServletSettings/service.properties"
    ServiceProperties_servlet_desfile = "/opt/active/sites/acm01vegas/ActiveNetServlet/config/service.properties"
    threads = []  # thread handles (currently unused)
    # Host 20 only; numbers below 10 would be zero padded.
    for i in range(20, 21):
        if i < 10:
            host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        else:
            host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        b = threading.Thread(target=upload, args=(host, username, passwd, ServiceProperties_servlet_srcfile, ServiceProperties_servlet_desfile))
        b.start()
    print("servlet service properties done -- ")
    # NOTE(review): the two messages below look copy-pasted from the section
    # above -- this section actually uploads sdi.ini, not service.properties.
    print("servlet service properties -- ")
    SdiIni_servlet_srcfile = "/Users/ajia/Documents/tmp/Settings/ServletSettings/sdi.ini"
    SdiIni_servlet_desfile = "/opt/active/sites/acm01vegas/ActiveNetServlet/config/sdi.ini"
    threads = []  # thread handles (currently unused)
    for i in range(20, 21):
        if i < 10:
            host = 'perf-activenet-0' + str(i) + 'w.an.active.tan'
        else:
            host = 'perf-activenet-' + str(i) + 'w.an.active.tan'
        b = threading.Thread(target=upload, args=(host, username, passwd, SdiIni_servlet_srcfile, SdiIni_servlet_desfile))
        b.start()
    print("servlet service properties done -- ")
<file_sep>/Neoload LG Upgrade/SilentInstall.sh
# Author: <NAME>
# Last Modified: 2018-06-26
# Purpose: Silent install of Neoload Load Generators with WAN Emulation
# Precondition: make sure you have following files in install folder:
#
# neoload_6_2_2_linux_x64.sh
# ipfw
# ipfw_mod.ko
# agent.properties
# ------------------------------------------------------------------------------
# Stop the agent that is currently running behind the /opt/neoload symlink.
sudo /opt/neoload/bin/LoadGeneratorAgentService stop
# Unattended install of NeoLoad 6.2.2, "Load Generator" component only.
# NOTE(review): the installer is read from /tmp/Neoloadfile while the files
# below come from /tmp/Neoload622files -- confirm both staging paths are right.
sudo sh /tmp/Neoloadfile/neoload_6_2_2_linux_x64.sh -q -dir /opt/neoload622 "-Vsys.installationTypeId=Load Generator" -Vsys.component.Common\$Boolean=true "-Vsys.component.Load Generator\$Boolean=true"
# Install the WAN-emulation ipfw binary and kernel module with expected permissions.
sudo cp /tmp/Neoload622files/ipfw /opt/neoload622/tools/ipfw/linux/ipfw
sudo cp /tmp/Neoload622files/ipfw_mod.ko /opt/neoload622/tools/ipfw/linux/ipfw_mod.ko
sudo chmod 755 /opt/neoload622/tools/ipfw/linux/ipfw
sudo chmod 644 /opt/neoload622/tools/ipfw/linux/ipfw_mod.ko
# Install the pre-configured agent settings.
sudo cp /tmp/Neoload622files/agent.properties /opt/neoload622/conf/agent.properties
sudo chmod 644 /opt/neoload622/conf/agent.properties
# Repoint the /opt/neoload symlink at the new install and restart the agent.
sudo unlink /opt/neoload
sudo ln -s /opt/neoload622/ /opt/neoload
sudo /opt/neoload/bin/LoadGeneratorAgentService start
<file_sep>/PythonProjects/PerfEnvRelated/FTP_Tools.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import datetime
import threading
import os
import logging
logging.basicConfig(level=logging.CRITICAL, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def upload(host, username, passwd, local, remote, file_type):
    """Upload a single file (file_type == 1) or every file in a directory
    (file_type == 2, non-recursive) to `remote` on `host` over SFTP.

    Args:
        host: target hostname (SSH port 22).
        username, passwd: SSH credentials.
        local: local file path (type 1) or local directory (type 2).
        remote: remote file path (type 1) or remote directory (type 2).
        file_type: 1 -> file, 2 -> folder; anything else raises.
    Errors are printed (best-effort tool), never re-raised.
    """
    trans = None  # so the finally block is safe if Transport() itself fails
    try:
        logger.debug("current server : %r " % (host))
        logger.debug("current upload mode : %r" % ('file' if file_type == 1 else 'folder'))
        trans = paramiko.Transport((host, 22))
        # Bug fix: the placeholder '<PASSWORD>' was passed instead of the
        # `passwd` argument, which blows up at runtime.
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' upload file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.put(local, remote)
            print('upload file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            for f in os.listdir(local):
                print(' upload file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.put(os.path.join(local, f), os.path.join(remote, f))
                print('upload file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        # Bug fix: `trans` was unbound (NameError) here whenever the
        # Transport constructor raised; only close what was created.
        if trans is not None:
            trans.close()
def download(host, username, passwd, local, remote, file_type):
    """Download a single file (file_type == 1) or every file in a remote
    directory (file_type == 2, non-recursive) from `host` over SFTP.

    Args:
        host: target hostname (SSH port 22).
        username, passwd: SSH credentials.
        local: local destination file path (type 1) or directory (type 2).
        remote: remote source file path (type 1) or directory (type 2).
        file_type: 1 -> file, 2 -> folder; anything else raises.
    Errors are printed (best-effort tool), never re-raised.
    """
    trans = None  # so the finally block is safe if Transport() itself fails
    try:
        logger.debug("current server : %r " % (host))
        logger.debug("current download mode : %r" % ('file' if file_type == 1 else 'folder'))
        trans = paramiko.Transport((host, 22))
        # Bug fix: the placeholder '<PASSWORD>' was passed instead of the
        # `passwd` argument, which blows up at runtime.
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' download file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.get(remote, local)
            print('download file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            for f in sftp.listdir(remote):
                print(' download file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.get(os.path.join(remote, f), os.path.join(local, f))
                print('download file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        # Bug fix: `trans` was unbound (NameError) here whenever the
        # Transport constructor raised; only close what was created.
        if trans is not None:
            trans.close()
if __name__ == '__main__':
    # Credentials shared by every transfer below.
    username = "deploy"
    passwd = "<PASSWORD>"  # placeholder -- fill in before running

    print("Begin......")

    # Pull jetty.xml and service.properties from each AUI host in the range
    # (currently only host 01).
    for i in range(1, 2):
        host = ('perf-activenet-0' if i < 10 else 'perf-activenet-') + str(i) + 'w.an.active.tan'
        print(host)

        local_jettyxml = '/Users/ajia/Documents/tmp/3000test/pre/jetty.xml'
        remote_jettyxml = '/opt/active/sites/acm01vegasjetty/ActiveNetServlet/config/jetty.xml'
        local_serviceproperties = '/Users/ajia/Documents/tmp/3000test/pre/service.properties'
        remote_serviceproperties = '/opt/active/sites/acm01vegasjetty/ActiveNetServlet/config/service.properties'

        # file mode (1) -- single file per call
        download(host, username, passwd, local_jettyxml, remote_jettyxml, 1)
        download(host, username, passwd, local_serviceproperties, remote_serviceproperties, 1)

    # Staged for a follow-up upload step; currently unused.
    local_jettyxml_new = '/Users/ajia/Documents/tmp/3000test/new/jetty.xml'
    local_serviceproperties_new = '/Users/ajia/Documents/tmp/3000test/new/service.properties'
<file_sep>/PythonProjects/PerfEnvRelated/RushPool_Initialing.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import threading
import time
import logging
# logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
# logging.disable(logging.CRITICAL)
def get_response(url):
    """Hit `url` once to warm the org up and print status code + timing."""
    requests.adapters.DEFAULT_RETRIES = 5
    time_start = time.time()
    response = requests.get(url)
    # response.raise_for_status()
    time_end = time.time()
    initial_time = time_end - time_start
    rsc = response.status_code
    # Bug fix: the server number was read with a hard-coded slice url[22:24],
    # which silently breaks if the host prefix ever changes length.  Parse it
    # structurally out of the 'perf-activenet-NNw...' host segment instead.
    serverNo = url.split('-')[2][:2]
    print("Server : %s ; Status_Code : %r ; initial time : %r s ; hosturl : %s" % (serverNo, rsc, initial_time, url))
if __name__ == '__main__' :
    # Org whose rush pool should be warmed on every AUI host.
    orgname = 'acm01vegasjetty'
    count = 0
    # One thread per host; NOTE(review): workers are fire-and-forget,
    # the summary prints before the last responses arrive.
    for i in range(1, 19):
        prefix = "http://perf-activenet-0" if i < 10 else "http://perf-activenet-"
        urlstr = prefix + str(i) + "w.an.active.tan:3000/" + orgname + "/servlet/adminlogin.sdi"
        worker = threading.Thread(target=get_response, args=(urlstr,))
        worker.start()
        count += 1
    print("initial %d orgs" %(count))
<file_sep>/PythonProjects/PerfEnvRelated/ANE86288.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import csv
import logging
import datetime
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
logging.disable(logging.CRITICAL)
def getResponse(url, programID):
    """Issue one GET to `url` and print how long it took for `programID`."""
    started = datetime.datetime.now()
    requests.get(url)
    elapsed = (datetime.datetime.now() - started).total_seconds()
    print("ProgramID: %4s , totalResponseTime : %r s" % (programID, elapsed))
if __name__ == '__main__' :
    logging.debug('start of program')
    # Program ids previously exported from the perf01 database.
    with open("D:/tmp/programID_Perf01.csv") as csvfile:
        column = [row['DCPROGRAM_ID'] for row in csv.DictReader(csvfile)]
    # Time the exception-and-extra-dates REST call for every program id.
    for program_id in column:
        urlstr = ("https://anperf01.active.com/perf01/rest/program/exceptionandextradates?program_id="
                  + str(program_id) + "&api_key=<KEY>")
        getResponse(urlstr, program_id)
    logging.debug('end of program')
<file_sep>/RefreshPerfDB/MobileAPI-Perf01Jetty/10_CustomerQuestion_SetAsNotRequired.sql
use perf01jetty
-- Make every custom question optional (-1 = required) so automated
-- registrations are not blocked by required-question prompts.
update [CUSTOMQUESTIONS] set required = 0 where required = -1<file_sep>/UsefulJS/RandomChooseItemsFromArray.js
//facility ID list
var facilitylist = [3308,12966,2702,14126,14452,3150,14657,8239,13799,5883,10118,10971,14328,3937,7465,14992,16961];
//random choose count
var min = 1;
var max = facilitylist.length;
var choosecount = Math.round(Math.random()*(max-min+1)+min);
var facilitylistselect = [ ];
var facilitylistselectNum = choosecount;
for (var i = 0; i < facilitylistselectNum; i++) {
var ran = Math.floor(Math.random() * (facilitylist.length - i));
facilitylistselect.push(facilitylist[ran]);
facilitylist[ran] = facilitylist[facilitylist.length - i - 1];
};
logger.debug(facilitylistselect)
function random(min, max) {
return Math.round(Math.random()*(max-min+1)+min);
}<file_sep>/RefreshPerfDB/RefreshSTGDB/4_CUIRegister_ActivityPrep.sql
use lstgnaparec
-- Activities prep: open the STG test activities for unrestricted registration.
/*
Activity Status Code:
0 - OPEN
1 - Closed
2 - Cancelled
3 - Tentative
4 - On Hold
*/
-- update SEASONS set
-- KEYBOARDDATE = '1899-12-30 00:00:00.000',
-- INTERNETDATE = '1899-12-30 00:00:00.000',
-- INTERNETENDDATE = '2017-12-31 00:00:00.000'
-- where seasonname like '2016%'
-- Remove gender/age/capacity/online restrictions from the STG activities.
update ACTIVITIES set
activitygender=0
,agesmax=99
,agesmin=0
,nointernetreg=0
,enrollmax=9999
,maxenrolledonline=9999
,internetdate='12/30/1899'
,internetenddate='12/30/1899'
,endingdate='12/31/2018'
,keybrdentryenddate='12/30/1899'
,DONTRESERVEFACILITIES=-1
-- Bug fix: the scalar '=' subquery fails with "Subquery returned more than
-- 1 value" as soon as several activities match '%STG%'; IN handles any
-- number of matches.
where activity_id in (select Activity_id from ACTIVITIES where ACTIVITYNAME like '%STG%')
-- Open all registration windows of season 31's open activities.
update ACTIVITYREGISTRATIONWINDOWS set
INTERNET_DATE='1/1/2017'
,INTERNET_END_DATE='12/30/1899'
,[KEYBOARD_ENTRY_DATE]='1/1/2017'
,[NON_RES_KEYBOARD_ENTRY_DATE]='1/1/2017'
,[MEMBER_KEYBOARD_ENTRY_DATE]='1/1/2017'
,[NON_RES_INTERNET_DATE]='1/1/2017'
,[MEMBER_INTERNET_DATE]='1/1/2017'
where activity_id in (select activity_id from activities where season_id = 31 and activitystatus = 0)
-- Advertise 888 open slots on each of those activities.
update ACTIVITYSTATISTICS set number_open = 888
where activity_id in (select activity_id from activities where season_id = 31 and activitystatus = 0)<file_sep>/PythonProjects/PerfEnvRelated/LiveSessionCheck.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import requests
import threading
from bs4 import BeautifulSoup
import time
import re
def get_live_session_count(url):
    """Fetch the displaysessiondata page and dump its raw body.

    NOTE(review): the HTML-parsing attempts were abandoned; this currently
    just prints the response content and returns None.
    """
    time_start = time.time()
    print(requests.get(url).content)
    time_end = time.time()
    time_spend = time_end - time_start  # measured but intentionally unused for now
if __name__ == '__main__' :
    # Orgs whose live-session page should be dumped.
    orgs = ['acm01vegasjetty']
    for org in orgs:
        # Route to the matching front end: CA prod, perf (debug), or US prod.
        if 'vancouver' in org:
            base = "https://anprodca.active.com/"
        elif 'acm01vegasjetty' in org: #debug
            base = "https://anperf01.active.com/"
        else:
            base = "https://anprod.active.com/"
        url = base + org + "/servlet/displaysessiondata.sdi"
        print(org)
        current_live_session_count = get_live_session_count(url,)
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/04_ActivitiesPrep.sql
-- Activities prep: open the targeted seasons for unrestricted registration.
/*
Activity Status Code:
0 - OPEN
1 - Closed
2 - Cancelled
3 - Tentative
4 - On Hold
*/
use acm01vegasjetty
-- update SEASONS set
-- KEYBOARDDATE = '1899-12-30 00:00:00.000',
-- INTERNETDATE = '1899-12-30 00:00:00.000',
-- INTERNETENDDATE = '2017-12-31 00:00:00.000'
-- where seasonname like '2016%'
-- Remove gender/age/capacity/online restrictions from open activities of
-- seasons 42/45/49/50 ('12/30/1899' appears to act as the unset/zero
-- date -- confirm against ActiveNet conventions).
update ACTIVITIES set
activitygender=0
,agesmax=99
,agesmin=0
,nointernetreg=0
,enrollmax=9999
,maxenrolledonline=9999
,internetdate='12/30/1899'
,internetenddate='12/30/1899'
,endingdate='12/31/2021'
,keybrdentryenddate='12/30/1899'
,DONTRESERVEFACILITIES=-1
where season_id in (42, 45, 49, 50) and activitystatus = 0
-- Open all registration windows as of 1/1/2019 with no end date.
update ACTIVITYREGISTRATIONWINDOWS set
INTERNET_DATE='1/1/2019'
,INTERNET_END_DATE='12/30/1899'
,[KEYBOARD_ENTRY_DATE]='1/1/2019'
,[NON_RES_KEYBOARD_ENTRY_DATE]='1/1/2019'
,[MEMBER_KEYBOARD_ENTRY_DATE]='1/1/2019'
,[NON_RES_INTERNET_DATE]='1/1/2019'
,[MEMBER_INTERNET_DATE]='1/1/2019'
where activity_id in (select activity_id from activities where season_id in (42, 45, 49, 50) and activitystatus = 0)
-- Advertise 888 open slots on each of those activities.
update ACTIVITYSTATISTICS set number_open = 888
where activity_id in (select activity_id from activities where season_id in (42, 45, 49, 50) and activitystatus = 0)<file_sep>/RefreshPerfDB/RefreshSTGDB/12_DisableDatabaseAutoBackup.sql
use lstgbreckenridgerec
-- Point backup/image/report paths at the lstgbreckenridgerec perf locations.
update systeminfo set keywordvalue = '/opt/active/ActiveNet/perf/SQLBACKUPS/lstgbreckenridgerec' where keyword = 'full_backup_unc'
update systeminfo set keywordvalue = 'W:\perf\SQLBACKUPS\lstgbreckenridgerec' where keyword = 'full_backup_unc_remote'
update systeminfo set keywordvalue = 'X:\lstgbreckenridgerec' where keyword = 'image_storage_path'
update systeminfo set keywordvalue = '/opt/active/data/an_filedata/lstgbreckenridgerec' where keyword = 'image_storage_path_local'
update systeminfo set keywordvalue = '/lstgbreckenridgerec/jreport/' where keyword = 'jreport_jsp_path'
-- Verify the rewritten values.
select * from systeminfo where keyword in ('full_backup_unc','full_backup_unc_remote','image_storage_path','image_storage_path_local','jreport_jsp_path')
-- Manually disable DB automatic backup at 'Web Admin' -> 'Active Staff'
<file_sep>/PythonProjects/PerfEnvRelated/tmpfile.py
# -*- coding: utf-8 -*-
# !/usr/bin/python
import paramiko
import datetime
import threading
import os
def get_current_orgname(host, username, passwd, cmd):
    """Run each shell command in `cmd` on `host` over SSH, echo the output,
    and return the collected stdout lines as a list of str.

    Bug fix: the function previously only printed the output and implicitly
    returned None, while callers do ''.join(get_current_orgname(...)) and
    crashed with a TypeError; it now returns the lines it read.
    """
    output = []
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(host, 22, username, passwd, timeout=5)
        for m in cmd:
            stdin, stdout, stderr = ssh.exec_command(m)
            # stdin.write("Y")  # simple interaction: answer 'Y' if prompted
            out = stdout.readlines()
            output.extend(out)
            # echo to the console, as before
            for o in out:
                print("%s : %s" % (host, o))
        ssh.close()
    except Exception as e:
        # Bug fix: a bare `except:` hid the real cause; keep the best-effort
        # behaviour but show what actually went wrong.
        print('%s\tError\n' % (host))
        print(e)
    return output
def upload(host, username, passwd, local, remote, file_type):
    """Upload a single file (file_type == 1) or every file in a directory
    (file_type == 2, non-recursive) to `remote` on `host` over SFTP.
    Errors are printed (best-effort tool), never re-raised.
    """
    trans = None  # so the finally block is safe if Transport() itself fails
    try:
        trans = paramiko.Transport((host, 22))
        # Bug fix: the placeholder '<PASSWORD>' was passed instead of the
        # `passwd` argument, which blows up at runtime.
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' upload file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.put(local, remote)
            print('upload file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            for f in os.listdir(local):
                print(' upload file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                sftp.put(os.path.join(local, f), os.path.join(remote, f))
                print('upload file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        # Bug fix: `trans` was unbound (NameError) here whenever the
        # Transport constructor raised; only close what was created.
        if trans is not None:
            trans.close()
def download(host, username, passwd, local, remote, file_type):
    """Download a single file (file_type == 1) or every '*.log' file in a
    remote directory (file_type == 2) from `host` over SFTP.
    Errors are printed (best-effort tool), never re-raised.
    """
    trans = None  # so the finally block is safe if Transport() itself fails
    try:
        trans = paramiko.Transport((host, 22))
        trans.connect(username=username, password=passwd)
        sftp = paramiko.SFTPClient.from_transport(trans)
        if file_type == 1:
            print(' download file on %s Start %s ' % (host, datetime.datetime.now()))
            sftp.get(remote, local)
            print('download file on %s End %s ' % (host, datetime.datetime.now()))
        elif file_type == 2:
            for f in sftp.listdir(remote):
                # Only pull log files; skip everything else in the directory.
                if '.log' in f:
                    print(' download file %s on %s Start %s ' % (str(f), host, datetime.datetime.now()))
                    sftp.get(os.path.join(remote, f), os.path.join(local, f))
                    print('download file %s on %s End %s ' % (str(f), host, datetime.datetime.now()))
        else:
            raise Exception('invalid document type')
    except Exception as e:
        print('%s\t connect error\n' % (host))
        print("-----------ExceptLog-----------")
        print(e)
    finally:
        # Bug fix: `trans` was unbound (NameError) here whenever the
        # Transport constructor raised; only close what was created.
        if trans is not None:
            trans.close()
if __name__ == '__main__':
    # SSH credentials shared by every host below.
    username = "deploy"
    passwd = "<PASSWORD>"  # placeholder -- fill in before running

    # Extracts the deployed org name from the running ActiveNetServlet1 java process.
    cmd_getorgname = ['ps -ef|grep java|grep -v grep|grep ActiveNetServlet1 | awk \'{print $13}\' | awk -F \'/\' \'{print $5}\'']

    local_dir_ignite = '/Users/ajia/Documents/tmp/error_logs/Cache_03W'
    local_dir_19 = '/Users/ajia/Documents/tmp/error_logs/AUI_19'
    local_dir_20 = '/Users/ajia/Documents/tmp/error_logs/AUI_20'
    remote_dir_ignite = '/opt/active/sites/ignite01/ActiveNetServlet/logs'

    print("Begin......")
    print("AUI Server..")
    # Currently only host 19; widen the range to also pull host 20.
    for i in range(19, 20):
        host = ('perf-activenet-0' if i < 10 else 'perf-activenet-') + str(i) + 'w.an.active.tan'
        # Look up the org deployed on the host, then pull its log folder (mode 2).
        if i == 19:
            orgname_19 = ''.join(get_current_orgname(host, username, passwd, cmd_getorgname)).strip()
            remote_dir_19 = '/opt/active/sites/' + orgname_19 + '/ActiveNetServlet/logs'
            print(remote_dir_19)
            download(host, username, passwd, local_dir_19, remote_dir_19, 2)
        elif i == 20:
            orgname_20 = ''.join(get_current_orgname(host, username, passwd, cmd_getorgname)).strip()
            remote_dir_20 = '/opt/active/sites/' + orgname_20 + '/ActiveNetServlet/logs'
            print(remote_dir_20)
            download(host, username, passwd, local_dir_20, remote_dir_20, 2)
        else:
            print("no host is found")
    print("End......")
<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/25_AccountRedesign.sql
use perf01
-- Turn on the redesigned CUI account feature flag: insert the row the first
-- time, update it on later runs.
-- NOTE(review): siblings in MakeCPDtoPerfOrg target acm01vegasjetty while this
-- targets perf01 -- confirm the database name is intended.
IF not exists (select keywordvalue from SYSTEMINFO where KEYWORD = 'enable_new_cui_account')
BEGIN
insert into SYSTEMINFO (KEYWORD, KEYWORDVALUE) values ('enable_new_cui_account', 'true')
END
ELSE
BEGIN
-- Bug fix: the UPDATE filtered on the placeholder '<PASSWORD>' and therefore
-- never touched the real row; filter on 'enable_new_cui_account'.
UPDATE dbo.systeminfo SET KEYWORDVALUE = 'true' WHERE KEYWORD = 'enable_new_cui_account'
END
select * from SYSTEMINFO where keyword = 'enable_new_cui_account'<file_sep>/RefreshPerfDB/MakeCPDtoPerfOrg/12_DisableDatabaseAutoBackup.sql
use acm01vegasjetty
-- Point backup/image/report paths at the acm01vegasjetty perf locations.
update systeminfo set keywordvalue = '/opt/active/ActiveNet/perf/SQLBACKUPS/acm01vegasjetty' where keyword = 'full_backup_unc'
update systeminfo set keywordvalue = 'W:\perf\SQLBACKUPS\acm01vegasjetty' where keyword = 'full_backup_unc_remote'
update systeminfo set keywordvalue = 'X:\acm01vegasjetty' where keyword = 'image_storage_path'
update systeminfo set keywordvalue = '/opt/active/data/an_filedata/acm01vegasjetty' where keyword = 'image_storage_path_local'
update systeminfo set keywordvalue = '/acm01vegasjetty/jreport/' where keyword = 'jreport_jsp_path'
-- Verify the rewritten values.
select * from systeminfo where keyword in ('full_backup_unc','full_backup_unc_remote','image_storage_path','image_storage_path_local','jreport_jsp_path')
-- Manually disable DB automatic backup at 'Web Admin' -> 'Active Staff'
|
7f1724887f0bf66711758d9a163841428d656c44
|
[
"SQL",
"reStructuredText",
"JavaScript",
"INI",
"Java",
"Python",
"Shell"
] | 80 |
SQL
|
AndrewMO/BackupFiles
|
aa4bda7a280276111bd960622ae5496cf471037b
|
b2fdc965d855d9ba9c449f7e520c051ce9707ba2
|
refs/heads/master
|
<repo_name>xiejunping/wapman-admin<file_sep>/app/common/helper/redis.js
const redis = require('redis')
const { host, port, password } = require('../config/redis')
/**
 * Promise-based wrapper around the callback-style `redis` client.
 * A single shared instance is exported, connected with the configured
 * host / port / password.
 */
class Redis {
  constructor(host, port, password) {
    this.host = host;
    this.port = port;
    // Bug fix: this line previously assigned the placeholder `<PASSWORD>`
    // (a ReferenceError at runtime) instead of the `password` parameter.
    this.password = password;
    this.client = this.init()
  }

  // Create the underlying redis client with the stored connection settings.
  init() {
    return redis.createClient({
      host: this.host,
      port: this.port,
      password: this.password
    });
  }

  /**
   * Run one client command with the given arguments and resolve with its
   * reply. Shared by every public method below so the callback-to-Promise
   * boilerplate exists only once.
   */
  _exec(command, ...args) {
    return new Promise((resolve, reject) => {
      this.client[command](...args, (error, response) => {
        if (error) reject(error)
        else resolve(response)
      })
    })
  }

  /** SET key -> value. */
  set(key, value) {
    return this._exec('set', key, value)
  }

  /** GET the value stored at key. */
  get(key) {
    return this._exec('get', key)
  }

  /** DEL key. */
  del(key) {
    return this._exec('del', key)
  }

  /** EXISTS key. */
  exists(key) {
    return this._exec('exists', key)
  }

  /** PEXPIREAT: expire key at a millisecond timestamp. */
  pexpireat(key, timestamp) {
    return this._exec('pexpireat', key, timestamp)
  }

  /** KEYS matching pattern. */
  keys(pattern) {
    return this._exec('keys', pattern)
  }

  /** PTTL: remaining time-to-live of key in milliseconds. */
  pttl(key) {
    return this._exec('pttl', key)
  }
}
module.exports = new Redis(host, port, password)
<file_sep>/app/action/login.action.js
const RD = require('../common/helper/redis');
const Mysql = require('../common/helper/mysql')
const DB = new Mysql('wap_login')
const BaseAction = require('./base.action.class')
const LoginModel = require('../model/login.model')
const loginModel = new LoginModel(DB)
/**
 * Records login/logout events and clears cached sessions.
 */
class LoginAction extends BaseAction {
  constructor (model) {
    super(model)
  }

  /**
   * Persist a login record; `mode` defaults to 'in' unless supplied.
   */
  async setLoginInfo (info) {
    const record = Object.assign({mode: 'in'}, info)
    return super.add(record)
  }

  /**
   * Persist a logout record; `mode` defaults to 'out' unless supplied.
   */
  async setLogoutInfo (info) {
    const record = Object.assign({mode: 'out'}, info)
    return super.add(record)
  }

  /**
   * Delete the session key from Redis when it exists; no-op otherwise.
   */
  async storeDel (session) {
    const found = await RD.exists(session)
    if (!found) return
    return RD.del(session)
  }
}
module.exports = new LoginAction(loginModel)
<file_sep>/app/common/config/mail.js
// Outgoing-mail transport settings.
// NOTE(review): credentials are redacted placeholders and prod/dev are
// identical -- move real values into environment variables before deploying.
const prod = {
  SERVICE: 'QQex',
  AUTH_ACCOUNT: '<EMAIL>',
  AUTH_PASS: '<PASSWORD>',
  FROM: '"VUE技术栈" <<EMAIL>>'
};
const dev = {
  SERVICE: 'QQex',
  AUTH_ACCOUNT: '<EMAIL>',
  AUTH_PASS: '<PASSWORD>',
  FROM: '"VUE技术栈" <<EMAIL>>'
};
// Pick the block matching NODE_ENV.
module.exports = process.env.NODE_ENV === 'production' ? prod : dev
<file_sep>/app/utils/index.js
const random = require('string-random');
const utils = {
  /**
   * Random integer in the inclusive range [min, max].
   * @param min lower bound
   * @param max upper bound
   * @returns {number}
   */
  random: (min, max) => {
    const span = max - min + 1
    return Math.floor(Math.random() * span + min)
  },
  /**
   * Random digits-only string of length `len`.
   * @param len desired length
   * @returns {string|undefined} undefined when len is not a non-zero number
   */
  randomNumber: len => {
    if (typeof len !== 'number' || len === 0) return
    return random(len, {letters: false})
  },
  /** Random alphanumeric string of length `len`. */
  randomString: len => {
    if (typeof len !== 'number' || len === 0) return
    return random(len)
  },
  /** Random letters-only string of length `len`. */
  randomLetter: len => {
    if (typeof len !== 'number' || len === 0) return
    return random(len, {numbers: false})
  },
  /**
   * Trim surrounding whitespace; null is mapped to ''.
   */
  trim: str => {
    if (!String.prototype.trim) {
      // Legacy fallback for engines without String#trim.
      return str.replace(/(^\s*)|(\s*$)/g, '')
    }
    return str === null ? '' : String.prototype.trim.call(str)
  },
  /**
   * True when str is empty or whitespace-only after trimming.
   */
  isEmpty: str => {
    const blankOnly = new RegExp(/^[\s]{0,}$/)
    return blankOnly.test(utils.trim(str))
  },
  /** True when arr is a genuine Array. */
  isArray: arr => {
    const tag = Object.prototype.toString.call(arr)
    return tag === "[object Array]"
  },
}

module.exports = utils
<file_sep>/bin/wapman.sql
-- Schema for the wapman admin application (MySQL / InnoDB).
-- NOTE(review): wap_access and wap_role_access default to latin1 while every
-- other table uses utf8 -- confirm that is intentional before storing Chinese text.
-- NOTE(review): the '0000-00-00 00:00:00' defaults require NO_ZERO_DATE to be
-- absent from sql_mode on MySQL 5.7+.
-- ----------------------------
-- Table structure for wap_access
-- ----------------------------
DROP TABLE IF EXISTS `wap_access`;
CREATE TABLE `wap_access` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(50) CHARACTER SET utf8 NOT NULL COMMENT '权限名称',
`urls` varchar(100) CHARACTER SET utf8 NOT NULL COMMENT '权限链接',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态 1 -> 有效 ',
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- ----------------------------
-- Table structure for wap_group
-- ----------------------------
DROP TABLE IF EXISTS `wap_group`;
CREATE TABLE `wap_group` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`pid` int(11) NOT NULL DEFAULT '0' COMMENT '父级ID',
`name` varchar(20) NOT NULL COMMENT '用户分组',
`level` tinyint(1) NOT NULL DEFAULT '1' COMMENT '等级',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态 1 -> 有效 0 -> 禁用',
`order` tinyint(2) NOT NULL DEFAULT '0' COMMENT '排序',
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_group_user
-- ----------------------------
DROP TABLE IF EXISTS `wap_group_user`;
CREATE TABLE `wap_group_user` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`uid` int(11) unsigned NOT NULL COMMENT '用户ID',
`gid` int(11) unsigned NOT NULL COMMENT '组ID',
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期',
`update_date` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '更新日期',
PRIMARY KEY (`id`),
UNIQUE KEY `uid` (`uid`) USING BTREE COMMENT '唯一分组'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_login
-- ----------------------------
DROP TABLE IF EXISTS `wap_login`;
CREATE TABLE `wap_login` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`uid` int(15) NOT NULL,
`name` varchar(20) DEFAULT NULL,
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`sessionId` varchar(64) DEFAULT NULL,
`mode` varchar(15) NOT NULL,
`type` varchar(15) NOT NULL,
`client` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=42 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_mall
-- ----------------------------
DROP TABLE IF EXISTS `wap_mall`;
CREATE TABLE `wap_mall` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`sdk` int(15) NOT NULL,
`name` varchar(20) DEFAULT NULL,
`type` varchar(15) NOT NULL,
`real_val` int(15) NOT NULL,
`real_unit` int(3) NOT NULL,
`vir_val` int(15) NOT NULL,
`vir_unit` int(3) NOT NULL,
`status` int(1) DEFAULT NULL,
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_menu
-- ----------------------------
DROP TABLE IF EXISTS `wap_menu`;
CREATE TABLE `wap_menu` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`pid` int(11) NOT NULL DEFAULT '0' COMMENT '父级ID',
`name` varchar(20) NOT NULL,
`title` varchar(50) NOT NULL COMMENT '菜单名称',
`path` varchar(50) NOT NULL COMMENT '链接',
`level` tinyint(2) NOT NULL DEFAULT '1' COMMENT '菜单等级',
`order` tinyint(2) NOT NULL DEFAULT '0' COMMENT '排序',
`icon` varchar(20) NOT NULL COMMENT '图标',
`component` varchar(20) NOT NULL COMMENT '组件名',
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态 1 -> 启用 0 -> 禁用',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`),
KEY `path` (`path`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_role
-- ----------------------------
DROP TABLE IF EXISTS `wap_role`;
CREATE TABLE `wap_role` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`gid` int(11) NOT NULL,
`name` varchar(20) NOT NULL,
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态 1 -> 有效 0 -> 无效',
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_role_access
-- ----------------------------
DROP TABLE IF EXISTS `wap_role_access`;
CREATE TABLE `wap_role_access` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`role_id` int(11) NOT NULL,
`access_id` int(11) NOT NULL,
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`update_date` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '更新日期',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- ----------------------------
-- Table structure for wap_signed
-- ----------------------------
DROP TABLE IF EXISTS `wap_signed`;
CREATE TABLE `wap_signed` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`uid` int(15) NOT NULL,
`name` varchar(20) DEFAULT NULL,
`type` varchar(15) NOT NULL,
`reward` varchar(15) NOT NULL,
`client` varchar(64) DEFAULT NULL,
`creat_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_unit
-- ----------------------------
DROP TABLE IF EXISTS `wap_unit`;
CREATE TABLE `wap_unit` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`pid` int(10) NOT NULL DEFAULT '0',
`name` varchar(20) NOT NULL,
`mode` varchar(15) DEFAULT NULL,
`status` int(1) NOT NULL DEFAULT '1',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=19 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for wap_user
-- ----------------------------
DROP TABLE IF EXISTS `wap_user`;
CREATE TABLE `wap_user` (
`id` int(15) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(20) DEFAULT NULL COMMENT '账号',
`phone` varchar(11) DEFAULT NULL COMMENT '手机号码',
`nickname` varchar(20) DEFAULT NULL COMMENT '昵称',
`password` varchar(65) DEFAULT NULL COMMENT '密码',
`email` varchar(20) DEFAULT NULL COMMENT '验证邮箱',
`sex` int(1) unsigned DEFAULT '0' COMMENT '性别 0 -> 男 1 -> 女',
`qq` varchar(15) DEFAULT NULL COMMENT '腾讯QQ',
`cid` varchar(20) DEFAULT NULL COMMENT '邀请码',
`is_setpass` int(1) NOT NULL DEFAULT '0' COMMENT '是否设置密码 1-> 设置 0 -> 未设',
`reg_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '注册日期',
`avatar` varchar(255) DEFAULT NULL COMMENT '头像',
`pid` varchar(20) DEFAULT NULL COMMENT '推荐码',
`all_pid` varchar(255) DEFAULT NULL COMMENT '推荐树',
`coin` int(5) DEFAULT '0' COMMENT '金币',
`score` int(5) DEFAULT '0' COMMENT '积分',
`super` tinyint(1) NOT NULL DEFAULT '0' COMMENT '超级管理 1 -> 是 0 -> 否',
PRIMARY KEY (`id`),
UNIQUE KEY `username_unique_key` (`name`,`phone`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8;
<file_sep>/app/action/role.action.js
const Mysql = require('../common/helper/mysql')
const DB = new Mysql('wap_role')
const BaseAction = require('./base.action.class')
const RoleModel = require('../model/role.model')
const roleModel = new RoleModel(DB)
const logger = require('../controllers/logger');
const utils = require('../utils/index');
const Mailer = require('../controllers/mailer')
/**
 * Role management on top of the generic BaseAction CRUD helpers.
 */
class RoleAction extends BaseAction {
  constructor (model) {
    super(model)
  }

  /**
   * One page of roles belonging to group `gid`.
   */
  async getRoles ({gid, pageIndex, pageSize}) {
    const info = {
      tableName: 'wap_role',
      whereJson: {
        and: {gid}
      },
      limitArr: [
        (pageIndex - 1) * pageSize, pageSize
      ]
    };
    return await this.Model.fetchAll(info)
  }

  /**
   * Total number of roles in group `gid`.
   * NOTE(review): fetches every row just to read .length; a COUNT(*) query
   * would be cheaper once the model layer supports one.
   */
  async countRoles ({gid}) {
    const info = {
      tableName: 'wap_role',
      whereJson: {
        and: {gid}
      }
    };
    const rs = await this.Model.fetchAll(info)
    return !!rs ? rs.length : 0
  }

  /** Roles matching an arbitrary query description. */
  async getRolesByJson (info) {
    return await super.getRows(info)
  }

  /**
   * Re-assign the roles in `id` to group `gid`.
   * Security fix: `id` and `gid` were interpolated into the SQL string
   * unchecked (SQL injection); both are now coerced to finite numbers, and
   * an empty id list short-circuits (also avoiding an illegal `IN ()`).
   */
  async removeRole (id, gid) {
    try {
      const safeIds = id.map(Number).filter(Number.isFinite)
      const safeGid = Number(gid)
      if (safeIds.length === 0 || !Number.isFinite(safeGid)) return
      const sqlMod = `UPDATE \`wap_role\` SET \`gid\` = ${safeGid} WHERE id IN (${safeIds.join(',')})`
      return await super.query(sqlMod)
    } catch (error) {
      logger.error(error)
    }
  }
}
module.exports = new RoleAction(roleModel)
<file_sep>/app/action/unit.action.js
const Mysql = require('../common/helper/mysql');
const DB = new Mysql('wap_unit');
const unitModel = require('../model/unit.model')(DB);
const action = {
    /**
     * Insert one unit record via the model layer.
     * @param info column/value map for wap_unit
     * @returns {Promise<*|Promise<*>>}
     */
    async addUnit (info) {
        return unitModel.add(info);
    },
    // NOTE(review): 'excute' is a typo for 'execute'; renaming would break
    // existing callers, so fix it together with the call sites.
    async excute () {
        // transaction -- delegates to the model
        await unitModel.ab();
    },
};
module.exports = action;
<file_sep>/app/common/config/sms.js
// Tencent SMS settings (app id/key, signature, message template ids).
// NOTE(review): the appKey is a hard-coded secret and dev/prod are identical --
// move real values into environment variables before deploying.
const dev = {
  appId: 1400105074,
  appKey: '7d4f6c9e7947ea6c820ff8744119e403',
  smsSign: 'VUE技术栈',
  templateId: [146721, 146727, 146732]
}
const prod = {
  appId: 1400105074,
  appKey: '7d4f6c9e7947ea6c820ff8744119e403',
  smsSign: 'VUE技术栈',
  templateId: [146721, 146727, 146732]
}
// Pick the block matching NODE_ENV.
module.exports = process.env.NODE_ENV === 'production' ? prod : dev
<file_sep>/middleware/database.js
const RD = require('../app/common/helper/redis');
const Pool = require('../app/common/helper/pool')
const logger = require('../app/controllers/logger')
/**
 * Database middleware: initialises the Redis client and the MySQL pool at
 * startup and logs their connection status.
 * @param app koa application (unused; kept for middleware signature parity)
 */
const database = app => {
  const client = RD.init();
  const pool = Pool.init();
  // Probe the pool once so a misconfigured MySQL shows up at boot time.
  pool.getConnection((error, connection) => {
    // Bug fixes: the old success message claimed "Connected to MongoDB"
    // (this is MySQL), the error detail was dropped, and the probe
    // connection was never returned to the pool.
    if (error) logger.console(`Mysql Connected failed: ${error.message}`)
    else if (connection) {
      logger.console(`Mysql Connected successfully`)
      connection.release()
    }
  })
  client.on('connect', () => {
    logger.console(`Redis connect Successful`)
  })
  client.on('error', (err) => {
    logger.console(`Redis throw Error: ${err}`)
  })
}
module.exports = database
<file_sep>/middleware/connect.js
const path = require('path');
const favicon = require('koa-favicon');
const cors = require('koa2-cors');
const json = require('koa-json');
const logger = require('koa-logger');
const log4js = require('../app/controllers/logger');
const onError = require('koa-onerror');
const bodyParser = require('koa-bodyparser');
const koaStatic = require('koa-static');
const views = require('koa-views');
const resFormat = require('../app/controllers/responseFmt');
// In production, route koa-logger output into log4js; in development keep
// koa-logger's default console output by passing a non-function value.
// NOTE(review): koa-logger treats a non-function argument as its options
// object — confirm the empty string really falls through to the default.
const log = process.env.NODE_ENV === 'production' ? (str, args) => log4js.info(args) : '';
// Handler for app.on('error'): structured request/error log in production,
// plain error log otherwise.
const err = process.env.NODE_ENV === 'production' ? (err, ctx) => log4js.errLogger(ctx, err) : (err, ctx) => log4js.error(err, ctx);
/**
 * Common koa middleware chain. Registration order matters: the response
 * formatter must run before any router (it is registered here, routers are
 * registered later in middleware/router.js).
 * @param app koa application
 */
const connect = app => {
  // Body parsing: accept json / form / text payloads, forms up to 1mb.
  app.use(bodyParser({
    formLimit: '1mb',
    enableTypes:['json', 'form', 'text']
  }));
  // Pretty-print JSON responses.
  app.use(json());
  // Centralised error capture; `err` (defined above) logs through log4js.
  onError(app);
  app.on('error', err);
  // app.on('missed', () => {
  //   ctx.auth = '验证失败,请登陆'
  //   console.error(ctx.auth)
  // });
  // app.on('expired', () => {
  //   ctx.auth = '验证已过期,请重新登陆'
  //   console.error(ctx.auth)
  // });
  // Request logging (see `log` above for the production transport).
  app.use(logger(log));
  // Static assets, cached for ~30 days.
  app.use(koaStatic(path.join(__dirname, '../public/'), { maxAge: 60000 * 1440 * 30 }));
  // Site favicon.
  app.use(favicon(path.join(__dirname, '../public/images/favicon.ico')));
  // CORS with credentials; preflight responses cached for an hour.
  app.use(cors({ maxAge: 3600, credentials: true }));
  // Pug view templates.
  app.use(views(path.join(__dirname, '../app/views'), {
    extension: 'pug'
  }));
  // Response envelope formatter — keep before the routers.
  app.use(resFormat);
};
module.exports = connect;
<file_sep>/app/common/config/server.js
/**
* 配置文件
* @type {{port: string, privateSession: string}}
*/
/**
 * Server configuration.
 * Dev and prod differ only in the login-session lifetime, so both are
 * produced by one factory.
 */
const makeConfig = sessionExprie => ({
  host: 'localhost',
  port: '3000',
  privateSession: 'SESSIONID', // koa-session cookie key
  errLogin: 180,
  codeExpire: 30,
  codeValid: 5,
  sessionExprie, // login session expiry (minutes)
  secret: 'WAPMAN' // user pass secret
})
const dev = makeConfig(1440 * 1)
const prod = makeConfig(1440 * 7)
module.exports = process.env.NODE_ENV === 'production' ? prod : dev
<file_sep>/app/common/helper/mysql.js
const async = require('async');
const Pool = require('./pool');
const pool = Pool.init();
/**
* 数据库模型
*/
/**
 * Promise-based helper around the shared MySQL connection pool.
 * An instance is bound to one table; most helpers build their SQL against
 * that table. WHERE objects passed to the driver's "?" placeholder are
 * escaped by the mysql driver; fetchAll builds SQL by string
 * concatenation — see the note there.
 */
class DB {
  /**
   * @param tableName table this helper operates on
   */
  constructor(tableName) {
    this.tableName = tableName;
    this.pool = pool;
  }
  /**
   * Check out a dedicated connection (required for transactions).
   * The caller is responsible for committing/rolling back and releasing it.
   * @returns {Promise<*>} pooled connection
   */
  getConnection() {
    const { pool } = this
    return new Promise((resolve, reject) => {
      pool.getConnection((err, connection) => {
        if (err) reject(err)
        else resolve(connection)
      })
    })
  }
  /**
   * Fetch one row matching the equality map.
   * NOTE(review): results.pop() hands back the LAST matching row and
   * mutates the result array — confirm callers expect that rather than
   * the first match.
   * @param idJson column/value equality map (driver-escaped)
   * @returns {Promise<*>} the row, or undefined when nothing matched
   */
  fetchRow(idJson) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `SELECT * FROM ${tableName} WHERE ?`
      pool.query(sqlMod, idJson, function(error, results) {
        if (error) {
          reject(error)
        } else {
          if (results) {
            resolve(results.pop())
          } else {
            resolve(results)
          }
        }
      })
    })
  }
  /**
   * Fetch every row matching the equality map.
   * @param idJson column/value equality map (driver-escaped)
   * @returns {Promise<Array>}
   */
  fetchRows(idJson) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `SELECT * FROM ${tableName} WHERE ?`
      pool.query(sqlMod, idJson, function (error, results) {
        if (error) {
          reject(error)
        } else resolve(results)
      })
    })
  }
  /**
   * Insert one row.
   * @param rowInfo column/value map (driver-escaped)
   * @returns {Promise<*>} driver result (insertId, affectedRows, …)
   */
  insert(rowInfo) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `INSERT INTO ${tableName} SET ?`
      pool.query(sqlMod, rowInfo, function(error, result) {
        if (error) reject(error)
        else resolve(result)
      })
    })
  }
  /**
   * Update rows matching `idJson` with the values in `rowInfo`.
   * @param idJson WHERE equality map
   * @param rowInfo SET column/value map
   * @returns {Promise<*>} driver result (affectedRows, …)
   */
  update(idJson, rowInfo) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `UPDATE ${tableName} SET ? WHERE ?`
      pool.query(sqlMod, [rowInfo, idJson], function (error, result) {
        if (error) reject(error)
        else resolve(result)
      })
    })
  }
  /**
   * Delete rows matching the equality map.
   * @param idJson WHERE equality map
   * @returns {Promise<*>} driver result (affectedRows, …)
   */
  remove(idJson) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `DELETE FROM ${tableName} WHERE ?`
      pool.query(sqlMod, idJson, function (error, result) {
        if (error) reject(error)
        else resolve(result)
      })
    })
  }
  /**
   * Fetch the whole table.
   * @returns {Promise<Array>}
   */
  getAll () {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `SELECT * FROM ${tableName}`
      pool.query(sqlMod, function (error, result) {
        if (error) reject(error)
        else resolve(result)
      })
    })
  }
  /**
   * COUNT(*) of rows matching the equality map.
   * @param idJson WHERE equality map
   * @returns {Promise<{count: number}>}
   */
  count(idJson) {
    const { tableName, pool } = this
    return new Promise((resolve, reject) => {
      const sqlMod = `SELECT COUNT(*) as count FROM ${tableName} WHERE ?`
      pool.query(sqlMod, idJson, function (error, result) {
        if (error) reject(error)
        else resolve(result.pop())
      })
    })
  }
  /**
   * Run a caller-supplied SQL string verbatim.
   * The caller is responsible for escaping — never pass raw user input.
   * @param sqlMod complete SQL statement
   * @returns {Promise<*>}
   */
  queryStr(sqlMod) {
    const { pool } = this
    return new Promise((resolve, reject) => {
      pool.query(sqlMod, function (error, result) {
        if (error) {
          reject(error)
        } else {
          resolve(result)
        }
      })
    })
  }
  /**
   * Composite query built from AND/OR/BETWEEN descriptors.
   * NOTE(review): values are embedded by string concatenation with naive
   * double-quote wrapping for strings — values containing quotes will
   * break or inject into the statement. Only pass trusted values here.
   * @param tableName table to select from (overrides the bound table)
   * @param selectStr column list, e.g. '*'
   * @param whereJson {and: {...}, or: {...}, between: [col, lo, hi]}
   * @param orderByJson {key, type} or '' for no ordering
   * @param limitArr [offset, rowCount] or '' for no limit
   * @returns {Promise<Array>}
   */
  fetchAll(tableName, selectStr, whereJson, orderByJson = '', limitArr = '') {
    const andWhere = whereJson['and']
    const orWhere = whereJson['or']
    const betArr = whereJson['between']
    const andArr = []
    const orArr = []
    for(const key in andWhere) {
      const snap = typeof andWhere[key] === 'string' ? '\"' : ''
      andArr.push(`\`${key}\` = ${snap}${andWhere[key]}${snap}`)
    }
    for(const key in orWhere) {
      // Bug fix: quoting used to be decided by `typeof andWhere[key]`,
      // which mis-quoted OR values whose key is absent from the AND set.
      const snap = typeof orWhere[key] === 'string' ? '\"' : ''
      orArr.push(`\`${key}\` = ${snap}${orWhere[key]}${snap}`)
    }
    const andStr = andArr.join(' and ')
    const orStr = orArr.join(' or ')
    const betStr = betArr ? `AND ${betArr[0]} BETWEEN ${betArr[1]} AND ${betArr[2]}` : ''
    const orderStr = orderByJson['type'] ? `order by ${orderByJson['key']} ${orderByJson['type']}` : ''
    const limitStr = limitArr.length > 0 ? `limit ${limitArr.join(',')}` : ''
    const sqlMod = `SELECT ${selectStr} FROM ${tableName} WHERE ${andStr} ${orStr} ${betStr} ${orderStr} ${limitStr}`
    return new Promise((resolve, reject) => {
      pool.query(sqlMod, function (error, results) {
        if (error) {
          reject(error)
        } else resolve(results)
      })
    })
  }
  /**
   * Run a statement on a dedicated connection, node-callback style.
   * Used to build the async.series task list for transactions.
   */
  connectionQuery(connection, sql, callback) {
    connection.query(sql, function (err, result) {
      if (err) callback(err, null)
      else callback(null, result)
    })
  }
  /**
   * Begin a transaction on `connection` and run `tasks` (async.series
   * style) inside it. Commit/rollback is left to the caller.
   * NOTE(review): Promise.resolve only carries one value, so the series
   * result passed as the second argument is dropped — callers only
   * receive the connection.
   * @param connection dedicated connection from getConnection()
   * @param tasks array of node-callback tasks
   */
  transaction(connection, tasks) {
    return new Promise((resolve, reject) => {
      connection.beginTransaction(err => {
        if (err) {
          reject(err)
          return
        }
        async.series(tasks, (error, result) => {
          if (error) reject(error)
          resolve(connection, result)
        })
      })
    })
  }
  /**
   * Commit the transaction on `connection`.
   * @returns {Promise<*>} the same connection (still checked out)
   */
  commit(connection) {
    return new Promise((resolve, reject) => {
      connection.commit(err => {
        if (err) reject(err)
        else resolve(connection)
      })
    })
  }
  /**
   * Roll the transaction back and return the connection to the pool.
   * @returns {Promise<void>}
   */
  rollback(connection) {
    return new Promise(resolve => {
      connection.rollback(() => {
        connection.release()
        resolve()
      })
    })
  }
}
module.exports = DB
<file_sep>/app/common/config/weixin.js
// WeChat open-platform OAuth configuration.
// Secrets appear as redacted placeholders (<KEY>) in this copy — supply
// the real values at deployment time, never commit them.
const dev = {
  appid: 'wxeb9294b7da15c1b7',
  secret: '<KEY>',
  token: 'amEQlniuKvibA7rzvsgq',
  // Callback WeChat redirects to after the user authorises.
  redirectUrl: 'https://www.jsvue.cn/api/weixin/callback',
  // QR-login entry; APPID/REDIRECT_URI/SCOPE/STATE are substituted at runtime.
  takenUrl: 'https://open.weixin.qq.com/connect/qrconnect?appid=APPID&redirect_uri=REDIRECT_URI&response_type=code&scope=SCOPE&state=STATE#wechat_redirect',
  // Code-for-access_token exchange; APPID/SECRET/CODE substituted at runtime.
  authUrl: 'https://api.weixin.qq.com/sns/oauth2/access_token?appid=APPID&secret=SECRET&code=CODE&grant_type=authorization_code'
};
const prod = {
  appid: 'wxeb<KEY>',
  secret: '<KEY>',
  token: 'am<KEY>',
  redirectUrl: 'https://www.jsvue.cn/api/weixin/callback',
  takenUrl: 'https://open.weixin.qq.com/connect/qrconnect?appid=APPID&redirect_uri=REDIRECT_URI&response_type=code&scope=SCOPE&state=STATE#wechat_redirect',
  authUrl: 'https://api.weixin.qq.com/sns/oauth2/access_token?appid=APPID&secret=SECRET&code=CODE&grant_type=authorization_code'
};
module.exports = process.env.NODE_ENV === 'production' ? prod : dev;
<file_sep>/app/action/common.action.js
const sha1 = require('sha1');
const SMS = require('../controllers/sms');
const RD = require('../common/helper/redis');
const api = require('../common/helper/api');
const utils = require('../utils/index');
const logger = require('../controllers/logger');
const { codeExpire } = require('../common/config/server');
const { appid, secret, token, redirectUrl, takenUrl, authUrl } = require('../common/config/weixin');
/**
 * Shared helper: generate a 6-digit code, cache it in redis under `phone`
 * with a codeExpire-minute TTL, then deliver it with `send`.
 * Outside production the SMS call is skipped and success is faked so local
 * development consumes no SMS quota. The three sendCode* entry points
 * below were previously three copy-pasted bodies.
 * @param phone target phone number
 * @param send (phone, code) => Promise — one of the SMS.* senders
 * @returns {Promise<*>} SMS api result ({result: 0} means success)
 */
const sendCode = async (phone, send) => {
  const code = utils.randomNumber(6);
  await RD.set(phone, code);
  // Date.now() (ms precision) replaces Date.parse(new Date()), which
  // silently truncated milliseconds.
  await RD.pexpireat(phone, Date.now() + codeExpire * 60000)
  try {
    return process.env.NODE_ENV === 'production' ? await send(phone, code) : {result: 0};
  } catch (err) {
    logger.error(err);
  }
};
const commonControl = {
  /** Send the login/registration verification code. */
  sendCodeLogin: async (phone) => sendCode(phone, (p, c) => SMS.smsLogin(p, c)),
  /** Send the password-recovery verification code. */
  sendCodePass: async (phone) => sendCode(phone, (p, c) => SMS.smsPass(p, c)),
  /** Send the phone-ownership verification code. */
  sendCodeCheck: async (phone) => sendCode(phone, (p, c) => SMS.smsCheck(p, c)),
  /**
   * Shorten a URL through the weibo short-url service.
   * @param address long url
   * @returns {Promise<*>} api response
   */
  creatShortURL: async (address) => {
    const url = 'https://api.weibo.com/2/short_url/shorten.json';
    const headers = {
      referer: 'https://api.weibo.com',
      host: 'api.weibo.com'
    };
    try {
      return await api.referer(url, headers, { source: 2849184197, url_long: address });
    } catch (err) {
      logger.error(err);
    }
  },
  /**
   * WeChat server-verification signature: sha1 of the sorted
   * token/timestamp/nonce triple.
   */
  checkSignature: async (timestamp, nonce) => {
    let str = [token, timestamp, nonce].sort().join('');
    return sha1(str);
  },
  /** Build the WeChat QR-login entry url carrying the CSRF `state`. */
  getStartUrl: async (state) => {
    let url = takenUrl;
    url = url.replace('APPID', appid)
    url = url.replace('REDIRECT_URI', encodeURIComponent(redirectUrl))
    url = url.replace('SCOPE', 'snsapi_login')
    url = url.replace('STATE', state)
    return url
  },
  /**
   * Exchange an OAuth code for a WeChat access token.
   * @param code OAuth code from the login callback
   */
  getAccessToken: async (code) => {
    let url = authUrl;
    url = url.replace('APPID', appid)
    url = url.replace('SECRET', secret)
    url = url.replace('CODE', code)
    try {
      return await api.fetch(url);
    } catch (err) {
      logger.error(err);
    }
  },
  /**
   * Fetch the WeChat profile of an authorised user.
   * @param access_token token from getAccessToken
   * @param openid user openid from getAccessToken
   */
  getUserInfo: async (access_token, openid) => {
    const url = 'https://api.weixin.qq.com/sns/userinfo';
    try {
      return await api.fetch(url, { access_token, openid });
    } catch (err) {
      logger.error(err);
    }
  }
};
module.exports = commonControl;
<file_sep>/app/action/dept.action.js
const Mysql = require('../common/helper/mysql')
const DB = new Mysql('wap_group')
const BaseAction = require('./base.action.class')
const DeptModel = require('../model/dept.model')
const deptModel = new DeptModel(DB)
const { treeSortCreat } = require('../utils/tree') // 树形结构
/**
 * Department/group actions (table `wap_group`).
 */
class DeptAction extends BaseAction {
  constructor (model) {
    super(model)
  }
  /**
   * Level (depth) of a department.
   * Throws an explicit error when the id does not exist — previously this
   * destructured `undefined` and crashed with a bare TypeError. Matches
   * the explicit-error style of AccessAction.getAccessLevel.
   * @param id department id
   * @returns {Promise<number>}
   */
  async getDeptLevel (id) {
    const info = await super.getInfo(id)
    if (!info) throw new Error(`getDeptLevel: no department with id ${id}`)
    return info.level
  }
  /**
   * All departments arranged as a tree (see utils/tree).
   * @returns {Promise<Array>}
   */
  async getTreeDept () {
    const rows = await super.getAll()
    return treeSortCreat(rows)
  }
}
module.exports = new DeptAction(deptModel)
<file_sep>/app/routes/menu.js
const router = require('koa-router')();
const action = require('../action/menu.action');
const logger = require('../controllers/logger');
const c = require('../controllers/decorator');
const DateFmt = require('../utils/date');
router.prefix('/menu');

/**
 * Duplicate check shared by add/edit: the router `name` and `path` must be
 * unique across menus. Returns the error message to surface, or '' when
 * valid. (Previously copy-pasted in both handlers.)
 * @param name router name
 * @param path router path
 * @param id own id, excluded from the check when editing
 */
const checkDuplicate = async (name, path, id) => {
  if (await action.checkInfo({name}, id)) return '路由名称已存在,请更改重试';
  if (await action.checkInfo({path}, id)) return '路由路径已存在,请更改重试';
  return '';
};

// Tree of every menu row.
router.get('/', c.oAuth, async (ctx, next) => {
  const treeMenu = await action.getTreeMenu();
  if (treeMenu) {
    ctx.data = treeMenu;
    return;
  } else ctx.throw('获取树形菜单失败', 400);
});

// Bug fix: static paths must be registered before '/:id'. koa-router
// matches routes in registration order, so GET /menu/order used to be
// captured by '/:id' with id === 'order' and this handler was unreachable.
router.get('/order', c.oAuth, async (ctx, next) => {
  const { id } = ctx.session.user;
});

// Menu detail by id.
router.get('/:id', c.oAuth, async (ctx, next) => {
  const { id } = ctx.params;
  const info = await action.getInfo(id);
  if (info) {
    ctx.data = info;
  } else {
    ctx.msg = '未找到菜单信息';
  }
  return;
});

// Create a menu; its level is derived from the parent's level.
router.post('/add', c.oAuth, c.invalid, async (ctx, next) => {
  const { pid, name, title, path, icon, component, status } = ctx.request.body;
  const dup = await checkDuplicate(name, path);
  if (dup) {
    ctx.msg = dup;
    return
  }
  // Root menus (pid 0) start at level 0; children get parent level + 1.
  let level;
  if (parseInt(pid) === 0) level = parseInt(pid);
  else level = await action.getMenuLevel(pid);
  const menu = await action.add({ pid, name, title, path, icon, level: ++level, component, status });
  if (menu && menu.insertId) {
    logger.console(`菜单标题-${title}-添加成功:id为${menu.insertId}`);
    ctx.data = DateFmt.now();
    return;
  } else {
    ctx.throw('菜单添加失败', 400);
  }
});

// Delete a menu; refuse when it still has children.
router.delete('/del/:id', c.oAuth, async (ctx, next) => {
  const { id } = ctx.params;
  const info = await action.getInfo(id);
  const children = await action.getChild(id);
  if (!info) {
    ctx.msg = '没有ID菜单信息';
    return
  }
  if (children.length) {
    ctx.msg = '菜单下有菜单,不能删除';
    return
  }
  const rs = await action.del(id);
  if (rs.affectedRows === 1) {
    ctx.data = DateFmt.now();
    return;
  } else {
    ctx.throw('菜单删除错误', 400);
  }
});

// Update a menu, re-deriving its level from the (possibly new) parent.
router.patch('/edit', c.oAuth, c.invalid, async (ctx, next) => {
  const { id, pid, name, title, path, icon, component, status } = ctx.request.body;
  const dup = await checkDuplicate(name, path, id);
  if (dup) {
    ctx.msg = dup;
    return
  }
  let level;
  if (parseInt(pid) === 0) level = parseInt(pid);
  else level = await action.getMenuLevel(pid);
  const rs = await action.edit(id, { pid, name, title, path, level: ++level, icon, component, status });
  if (rs.affectedRows === 1) {
    ctx.data = DateFmt.now();
    return;
  } else {
    ctx.throw('菜单更新失败', 400);
  }
});

module.exports = router;
<file_sep>/app/common/config/redis.js
// Redis connection settings; the password is a redacted placeholder.
// Dev and prod are currently identical, so both come from one factory
// (each call returns a fresh object).
const makeConfig = () => ({
  host: '127.0.0.1',
  port: '6379',
  password: '<PASSWORD>'
});
const dev = makeConfig();
const prod = makeConfig();
module.exports = process.env.NODE_ENV === 'production' ? prod : dev
<file_sep>/app/action/menu.action.js
const Mysql = require('../common/helper/mysql');
const DB = new Mysql('wap_menu');
const BaseAction = require('./base.action.class')
const MenuModel = require('../model/menu.model')
const menuModel = new MenuModel(DB)
const { treeSortCreat } = require('../utils/tree');
/**
 * Menu actions (table `wap_menu`).
 */
class MenuAction extends BaseAction {
  constructor (model) {
    super(model)
  }
  /**
   * Menus as a tree. With `info` only enabled menus among the `mid` ids
   * are included; without it the whole table is used.
   * Bug fixes: the filtered SQL used to be passed to super.getAll(), whose
   * BaseAction implementation ignores its argument — the filter was
   * silently dropped; it now goes through super.query(). Status/ids are
   * also coerced to integers before interpolation to rule out injection.
   * @param info optional {status, mid: number[]}
   * @returns {Promise<Array>} tree built by utils/tree
   */
  async getTreeMenu (info) {
    let rows;
    if (info) {
      const status = parseInt(info.status, 10);
      const mid = [].concat(info.mid)
        .map(v => parseInt(v, 10))
        .filter(Number.isFinite);
      // No permitted ids means no visible menus (the old `id in ()` was
      // invalid SQL anyway).
      if (!mid.length || !Number.isFinite(status)) return treeSortCreat([]);
      const sqlMod = `SELECT * FROM \`wap_menu\` WHERE status = ${status} AND id in (${mid.join(',')})`;
      rows = await super.query(sqlMod);
    } else {
      rows = await super.getAll();
    }
    return treeSortCreat(rows);
  }
  /**
   * Level (depth) of a menu; throws explicitly when the id is unknown
   * instead of a bare TypeError on destructuring.
   * @param id menu id
   * @returns {Promise<number>}
   */
  async getMenuLevel (id) {
    const info = await super.getInfo(id);
    if (!info) throw new Error(`getMenuLevel: no menu with id ${id}`);
    return info.level
  }
  /**
   * Whether a menu matching `info` already exists.
   * @param info query condition map
   * @param id own id — a match against itself does not count (edit case)
   * @returns {Promise<boolean>}
   */
  async checkInfo (info, id) {
    const row = await super.getRow(info);
    if (id && row) {
      return !(row.id === parseInt(id))
    }
    return !!row
  }
  /**
   * NOTE(review): this previously referenced `unitModel`, which is not
   * defined in this module, and threw a ReferenceError when called. Kept
   * as an explicit failure until the intended transaction is ported.
   */
  async excute () {
    throw new Error('MenuAction.excute is not implemented')
  }
}
module.exports = new MenuAction(menuModel)
<file_sep>/app/model/auth.model.js
const logger = require('../controllers/logger')
/**
* 用户相关的数据库操作
* @param DB
*/
/**
 * User/auth related table operations.
 * Currently a placeholder: returns an empty operation map.
 * @param DB table helper instance (unused for now)
 * @returns {Object} empty map of operations
 */
function auth (DB) {
  return {}
}
module.exports = auth
<file_sep>/app/controllers/logger.js
const log4js = require('log4js');
const config = require('../common/config/logger');
const { formatError, formatRes } = require('./formatLog');
// Apply the appender/category configuration to log4js.
log4js.configure(config);
// Category loggers: 'error' and 'response' map to dedicated appenders in
// the config; 'info' has no category of its own there, so it falls back
// to the default appender.
const errorLogger = log4js.getLogger('error');
const resLogger = log4js.getLogger('response');
const infoLogger = log4js.getLogger('info');
/**
 * Application logging facade over the category loggers above.
 */
const logger = {
  /** Structured request-error log (used by the global error handler). */
  errLogger: (ctx, error, resTime) => {
    errorLogger.error(formatError(ctx, error, resTime));
  },
  /** Structured access log with response time. */
  resLogger: (ctx, resTime) => {
    resLogger.info(formatRes(ctx, resTime));
  },
  /**
   * Log a koa-logger style argument array, dropping the leading format
   * string. Bug fix: uses slice() so the caller's array is no longer
   * mutated (the old implementation shifted it in place).
   * @param args argument array whose first element is the format string
   */
  info: (args) => {
    infoLogger.info(args.slice(1).join(' '));
  },
  /** Plain informational message. */
  console: str => {
    infoLogger.info(str);
  },
  /** Serialise an arbitrary object into the error log. */
  error: obj => {
    const str = JSON.stringify(obj);
    errorLogger.error(str);
  }
};
module.exports = logger;
<file_sep>/app.js
const Koa = require('koa');
const app = new Koa();
// Middleware modules — the registration order below matters.
const root = require('./middleware/root');
const connect = require('./middleware/connect');
const sess = require('./middleware/sess');
const router = require('./middleware/router');
const database = require('./middleware/database');
// Wire everything up: root page, common middleware (body parsing, logging,
// response formatter), session support, route tables, then the
// database/redis connections.
root(app);
connect(app);
sess(app);
router(app);
database(app);
module.exports = app;
<file_sep>/middleware/router.js
const glob = require('glob');
const path = require('path');
const routesPath = path.resolve(__dirname, '../app/routes');
/**
* 路由中间件
* @param app
*/
/**
 * Route-binding middleware: loads every router module under app/routes and
 * registers its routes on the app.
 * Callback locals renamed so they no longer shadow the `path` module and
 * the outer `router` identifier.
 * @param app koa application
 */
const router = app => {
  glob.sync(path.resolve(routesPath, './*.js')).forEach(filePath => {
    const routeModule = require(filePath);
    app.use(routeModule.routes()).use(routeModule.allowedMethods());
  });
};
module.exports = router;
<file_sep>/app/common/config/logger.js
const path = require('path');
module.exports = {
appenders: {
error: {
type: 'file',
category: 'errLogger',
filename: path.join(__dirname, '../../../bin/logs/error/err.log'),
maxLogSize: 104800,
backups: 100
},
response: {
type: 'dateFile',
category: 'resLogger',
filename: path.join(__dirname, '../../../bin/logs/responses/'),
pattern: 'yyyy-MM-dd.log',
alwaysIncludePattern: true,
maxLogSize: 104800,
backups: 100
}
},
categories: {
error: {appenders: ['error'], level: 'error'},
response: {appenders: ['response'], level: 'info'},
default: {appenders: ['response'], level: 'info'}
},
replaceConsole: true
};
<file_sep>/app/action/access.action.js
const Mysql = require('../common/helper/mysql')
const DB = new Mysql('wap_access')
const BaseAction = require('./base.action.class')
const AccessModel = require('../model/access.model')
const accessModel = new AccessModel(DB)
const { treeSortCreat } = require('../utils/tree') // 树形结构
/**
 * Access/permission actions (table `wap_access`).
 */
class AccessAction extends BaseAction {
  /**
   * @param model access table model
   */
  constructor (model) {
    super(model)
  }
  /**
   * All access records arranged as a tree (see utils/tree).
   * @returns {Promise<Array>}
   */
  async getTreeAccess () {
    const rows = await super.getAll()
    return treeSortCreat(rows)
  }
  /**
   * Level (depth) of an access node; throws when the id is unknown.
   * Bug fix: this called super.getInfoById(), which does not exist on
   * BaseAction (the base method is getInfo), so it always threw a
   * TypeError before reaching the lookup.
   * @param id access record id
   * @returns {Promise<number>}
   */
  async getAccessLevel (id) {
    const info = await super.getInfo(id)
    const level = info ? info.level : undefined
    if (level) return level
    else throw new Error(`getAccessLevel中level:${level}`)
  }
  /**
   * Menu ids (`mid`) for a set of access ids.
   * Overrides BaseAction.getRows with different argument semantics — kept
   * for backward compatibility with existing callers. Ids are coerced to
   * integers before interpolation to rule out SQL injection; an empty id
   * set short-circuits to [] (the old `id in ()` was invalid SQL).
   * @param ids access id, or array of access ids
   * @returns {Promise<Array>} rows of {mid}
   */
  async getRows (ids) {
    const safe = [].concat(ids)
      .map(v => parseInt(v, 10))
      .filter(Number.isFinite)
    if (!safe.length) return []
    const sqlMod = `SELECT \`mid\` FROM \`wap_access\` WHERE \`id\` in (${safe.join(',')})`;
    return await super.query(sqlMod)
  }
}
module.exports = new AccessAction(accessModel)
<file_sep>/app/model/user.model.js
const logger = require('../controllers/logger')
/**
* 用户相关的数据库操作
* @param DB
*/
/**
 * User table operations.
 * Every helper resolves with the DB result, or undefined when the query
 * throws (errors are logged, preserving the original contract).
 * Bug fix: all methods used `return res = await …`, assigning to the
 * undeclared `res` and leaking implicit globals on every call.
 * @param DB table helper bound to `wap_user`
 */
const user = DB => {
  return {
    /** Insert a new user row. */
    async add(info) {
      try {
        return await DB.insert(info)
      } catch (err) {
        logger.error(err)
      }
    },
    /** Update the user with primary key `id`. */
    async update(id, info) {
      try {
        return await DB.update({id}, info)
      } catch (err) {
        logger.error(err)
      }
    },
    /** Fetch a user by primary key. */
    async getByUserId(id) {
      try {
        return await DB.fetchRow({id})
      } catch (err) {
        logger.error(err)
      }
    },
    /** Fetch a single user matching an arbitrary equality map. */
    async getInfoByJson(info) {
      try {
        return await DB.fetchRow(info)
      } catch (err) {
        logger.error(err)
      }
    },
    /** Run a caller-built SQL string (score updates etc.). */
    async updateScore(sql) {
      try {
        return await DB.queryStr(sql)
      } catch (err) {
        logger.error(err)
      }
    },
    /**
     * NOTE(review): DB.fetchAll expects (tableName, selectStr, whereJson,
     * …) but is called here with only the where object — confirm the
     * intended signature before relying on this helper.
     */
    async getRowsByJson(where) {
      try {
        return await DB.fetchAll(where)
      } catch (err) {
        logger.error(err)
      }
    }
  }
};
module.exports = user;
<file_sep>/app/routes/auth.js
const router = require('koa-router')();
const action = require('../action/user.role.action');
const actionRoleAccess = require('../action/role.access.action');
const actionAccess = require('../action/access.action');
const actionMenu = require('../action/menu.action');
const logger = require('../controllers/logger');
const c = require('../controllers/decorator');
const DateFmt = require('../utils/date');
router.prefix('/auth');

// Demo page render (pug template).
router.get('/', async (ctx, next) => {
  ctx.type = 'html';
  await ctx.render('index', {
    title: 'John'
  });
});

// Menu tree restricted to the logged-in user's permissions:
// user -> role -> access records -> menu ids -> enabled-menu tree.
router.get('/menu', c.oAuth, async (ctx, next) => {
  const { id } = ctx.session.user;
  // Resolve the user's role mapping.
  const userRoleInfo = await action.getRow({uid: id});
  if (!userRoleInfo) {
    logger.console(`ID${id}用户查找角色对应关系发生异常!`);
    ctx.msg = '用户信息有误';
    return
  }
  // Access records granted to that role.
  const {rid} = userRoleInfo;
  const access = await actionRoleAccess.getRows({rid});
  if (access.length) {
    // Deduplicated access ids -> menu ids.
    const aids = access.map(ret => ret.aid);
    const rs = await actionAccess.getRows([...new Set(aids)]);
    const mid = rs.map(ret => ret.mid);
    // Build the tree of enabled menus the user may see.
    const treeMenu = await actionMenu.getTreeMenu({status: 1, mid: [...new Set(mid)]});
    if (treeMenu) {
      ctx.data = treeMenu;
      return;
    } else ctx.throw('获取树形权限菜单失败', 400);
  } else {
    // No role/access mapping: respond with the empty list.
    logger.console(`ID${id}用户暂无角色对应关系`);
    ctx.data = access;
  }
});

// Placeholder: per-id auth lookup (not implemented yet).
router.get('/:id', c.oAuth, async (ctx, next) => {
  const { id } = ctx.session.user;
});
module.exports = router;
<file_sep>/README.md
# wapman-admin
wapman koa2后台管理 PC+移动端
问:为什么要移动端?<br>
答:因为楼主很多时间是玩手机<br>
问:为什么用nodejs 做后端?<br>
答:因为楼主是个前端<br>
问:为什么项目经常更新?<br>
答:因为楼主主要是为了好玩<br>
问:为什么代码有些写的好,有些很差?<br>
答:因为我们是一个团队在玩<br>
问:如果我想参加你们团队,可以吗?<br>
答:可以,下面有微信群号,暗号:我要挖坑<br>
问:你们项目怎么使用?<br>
答:你怎么这么多问题,拒绝“伸手党”
<file_sep>/app/utils/tree.js
const utils = require('./index');
/**
 * Pick the nodes sitting at `level` under parent `pid`, ordered by their
 * `order` field (ascending).
 * @param list flat node list
 * @param level tree depth to select
 * @param pid parent id to select
 * @returns {Array} new sorted array; the input list is left untouched
 */
const filterArray = (list, level, pid) =>
  list
    .filter(node => node.level === level && node.pid === pid)
    .sort((a, b) => a.order - b.order);
exports.treeSortCreat = list => {
if (!utils.isArray(list)) return;
// 取出第一级
const array1 = filterArray(list, 1, 0);
// 拼接结果
return array1.map(ret => {
// 取出第二级
const array2 = filterArray(list, ret.level + 1, ret.id);
// 取出第三级
ret.children = array2.map(meta => {
meta.children = filterArray(list, meta.level + 1, meta.id);
return meta
});
return ret
})
};
<file_sep>/app/utils/date.js
const moment = require('moment')
/**
 * Date formatting helpers built on moment, exported as a singleton.
 * (The old no-op constructor has been removed.)
 */
class DateFmt {
  /**
   * Current time as a human-readable timestamp (YYYY-MM-DD HH:mm:ss).
   * @returns {string}
   */
  now () {
    return moment().format('YYYY-MM-DD HH:mm:ss')
  }
  /**
   * Current time compacted for use inside generated file names
   * (YYYYMMDDHHmmss).
   * @returns {string}
   */
  nowToName () {
    return moment().format('YYYYMMDDHHmmss')
  }
}
module.exports = new DateFmt()
<file_sep>/app/routes/role.js
const router = require('koa-router')();
const action = require('../action/role.action');
const actionDept = require('../action/dept.action');
const logger = require('../controllers/logger');
const c = require('../controllers/decorator');
const DateFmt = require('../utils/date');
router.prefix('/role');

// Paged role list, optionally scoped to a group; gid is replaced with the
// group's display name for presentation.
router.get('/', c.oAuth, c.invalid, async (ctx, next) => {
  const { gid, pageIndex = 1, pageSize = 20 } = ctx.request.query;
  const page = parseInt(pageIndex);
  const limit = parseInt(pageSize);
  let params = { pageIndex: page, pageSize: limit };
  if (parseInt(gid)) params = Object.assign(params, {gid});
  const rows = await action.getRoles(params);
  const count = await action.countRoles(params);
  if (rows) {
    // Bug fix: the old code indexed groups with findIndex() and crashed
    // with a TypeError (groups[-1].name) when a role pointed at a deleted
    // or unknown group; such roles now keep their raw gid. The mapping
    // also ran before the rows null-check.
    const groups = await actionDept.getAll();
    rows.forEach(ret => {
      const group = groups.find(met => met.id === ret.gid);
      if (group) ret.gid = group.name;
    })
    ctx.data = {
      pageIndex: page,
      pageSize: limit,
      list: rows,
      count
    };
    return
  } else ctx.throw('获取树形分组失败', 400);
});

// Create a role inside a group.
router.post('/add', c.oAuth, c.invalid, async (ctx, next) => {
  const { gid, name, status } = ctx.request.body;
  const role = await action.add({ gid, name, status });
  if (role.insertId) {
    logger.console(`角色-${name}-添加成功:id为${role.insertId}`);
    ctx.data = DateFmt.now();
    return
  } else {
    ctx.throw('角色添加失败', 400);
  }
});

// Delete one role.
router.delete('/del/:id', c.oAuth, async (ctx, next) => {
  const { id } = ctx.params;
  const rs = await action.del(id);
  if (rs.affectedRows === 1) {
    ctx.data = DateFmt.now();
    return
  } else {
    ctx.throw('角色删除错误', 400);
  }
});

// Update one role.
router.patch('/edit', c.oAuth, c.invalid, async (ctx, next) => {
  const { id, gid, name, status } = ctx.request.body;
  const rs = await action.edit(id, { gid, name, status });
  if (rs.affectedRows === 1) {
    ctx.data = DateFmt.now();
    return
  } else {
    ctx.throw('角色更新失败', 400);
  }
});

// Move a batch of roles into another group.
router.patch('/remove', c.oAuth, async (ctx, next) => {
  const { id, gid } = ctx.request.body;
  const rs = await action.removeRole(id, gid);
  if (rs.affectedRows) {
    ctx.data = DateFmt.now();
    return
  } else {
    ctx.throw('角色更新失败', 400);
  }
});
module.exports = router;
<file_sep>/app/action/user.action.js
const Mysql = require('../common/helper/mysql');
const RD = require('../common/helper/redis');
const COS = require('../controllers/cos');
const utils = require('../utils/index');
const crypto = require('../utils/crypto');
const logger = require('../controllers/logger');
const DB = new Mysql('wap_user');
const userModel = require('../model/user.model')(DB);
const { errLogin } = require('../common/config/server');
/**
 * User business actions: invite codes, login throttling, registration and
 * profile/file helpers.
 */
const userControl = {
  /**
   * Generate a unique 6-digit invite code, retrying on collision.
   * Bug fix: the collision branch previously did `await getInvite();`
   * without returning, so the promise resolved to undefined whenever the
   * first candidate already existed.
   * @returns {Promise<string>} unused invite code
   */
  createInviteCode: async () => {
    async function getInvite() {
      const invite = utils.randomNumber(6);
      const userInfo = await userModel.getInfoByJson({cid: invite});
      if (!!userInfo) {
        logger.console('生成邀请码重复存在: ' + invite);
        return await getInvite();
      } else {
        return invite;
      }
    }
    // Recursive retry until an unused code is found.
    return await getInvite();
  },
  /**
   * Build the ancestor chain for a new user: the inviter's code prepended
   * to the inviter's own chain.
   * @param invite inviter's code, may be empty
   * @returns {Promise<string|null>} chain string, or null without inviter
   */
  calcInviteId: async (invite) => {
    if (!invite) return null;
    const { all_pid } = await userModel.getInfoByJson({cid: invite});
    if (!!all_pid) {
      return `${invite},${all_pid}`;
    } else {
      return invite;
    }
  },
  /** Whether a user matching `info` exists. */
  checkInfo: async (info) => {
    const row = await userModel.getInfoByJson(info);
    return !!row;
  },
  /**
   * Verify a (pre-hashed) password for `name`.
   * @returns {Promise<*>} the user row on match, null on mismatch,
   *   undefined when the user is unknown
   */
  checkPass: async (name, password) => {
    const row = await userModel.getInfoByJson({name});
    if (Boolean(row)) {
      return row.password === password ? row : null;
    } else {
      logger.console(`验证密码时找不到用户信息:${name}`);
      return;
    }
  },
  /** AES-decrypt the transport form, then md5 for storage/compare. */
  cryptPass: (password) => {
    return crypto.md5(crypto.aesDecrypt(password)).toString();
  },
  /** Hash an arbitrary value for session identifiers. */
  cryptSession: (hash) => {
    return crypto.md5(hash).toString();
  },
  /**
   * Whether the account is locked out (20+ failed attempts recorded).
   * @param name login name used as the redis key
   * @returns {Promise<boolean>}
   */
  isLoginLock: async (name) => {
    const val = await RD.get(name) || 0;
    return val >= 20;
  },
  /**
   * Record one failed login; the counter expires errLogin minutes after
   * the latest failure.
   * @param name login name used as the redis key
   * @returns {Promise<number>} always 1
   */
  setTimeRedis: async (name) => {
    let val = await RD.get(name) || 0;
    await RD.set(name, ++val);
    await RD.pexpireat(name, Date.parse(new Date()) + errLogin * 60000);
    return 1;
  },
  /**
   * Register a user, defaulting email/sex when absent.
   * @param info user row values
   * @returns {Promise<*>} insert result
   */
  registerUser: async (info) => {
    info = Object.assign({}, {
      email: '',
      sex: 0
    }, info);
    return await userModel.add(info);
  },
  /** Update profile fields of user `id`. */
  uploadUserInfo: async (id, info) => {
    return await userModel.update(id, info);
  },
  /** Fetch a user by primary key. */
  getInfoById: async (id) => {
    return await userModel.getByUserId(id);
  },
  /** Fetch a user by phone number. */
  getInfoByPhone: async (phone) => {
    return await userModel.getInfoByJson({phone});
  },
  /**
   * Decode a data-url image and push it to COS.
   * Bug fix: Buffer.from replaces the deprecated, unsafe `new Buffer()`.
   * @param base64 data-url string (data:image/...;base64,...)
   * @returns {Promise<*>} COS upload result
   */
  uploadFile: async (base64) => {
    const base64Data = base64.replace(/^data:image\/\w+;base64,/, "");
    const dataBuffer = Buffer.from(base64Data, 'base64');
    return await COS.uploadImg(dataBuffer);
  },
};
module.exports = userControl;
<file_sep>/app/routes/member.js
const router = require('koa-router')();
const action = require('../action/user.role.action');
const logger = require('../controllers/logger');
const c = require('../controllers/decorator');
const DateFmt = require('../utils/date');
router.prefix('/member');

// Paged member list, optionally scoped to a group.
router.get('/', c.oAuth, async (ctx, next) => {
  const { gid, pageIndex = 1, pageSize = 20 } = ctx.request.query;
  const page = parseInt(pageIndex);
  const limit = parseInt(pageSize);
  const params = { pageIndex: page, pageSize: limit };
  if (parseInt(gid)) params.gid = gid;
  const rows = await action.getMembers(params);
  const count = await action.countMembers(params);
  if (!rows) ctx.throw('获取树形分组失败', 400);
  ctx.data = {
    pageIndex: page,
    pageSize: limit,
    list: rows,
    count
  };
});
module.exports = router;
<file_sep>/app/action/base.action.class.js
/**
 * Generic CRUD action layer. Each method forwards to the injected table
 * model and resolves with the model's result.
 */
class BaseAction {
  /**
   * @param model table model providing the actual DB calls
   */
  constructor (model) {
    this.Model = model
  }
  /**
   * Insert a record.
   * @param info record values
   * @returns {Promise<*>} insert result (contains insertId)
   */
  async add (info) {
    return this.Model.add(info)
  }
  /**
   * Update the record with primary key `id`.
   * @param id primary key
   * @param info record values
   * @returns {Promise<*>} update result (contains affectedRows)
   */
  async edit (id, info) {
    return this.Model.update(id, info)
  }
  /**
   * Delete the record with primary key `id`.
   * @param id primary key
   * @returns {Promise<*>} delete result
   */
  async del (id) {
    return this.Model.delete(id)
  }
  /**
   * Fetch one record by primary key.
   * @param id primary key
   * @returns {Promise<*>}
   */
  async getInfo (id) {
    return this.Model.getInfoById(id)
  }
  /**
   * Fetch every record of the table.
   * @returns {Promise<Array>}
   */
  async getAll () {
    return this.Model.getAll()
  }
  /**
   * Fetch every child of a parent record.
   * @param pid parent id
   * @returns {Promise<Array>}
   */
  async getChild (pid) {
    return this.Model.getRows({pid})
  }
  /**
   * Fetch one record by condition map.
   * @param info condition map
   * @returns {Promise<*>}
   */
  async getRow (info) {
    return this.Model.getRow(info)
  }
  /**
   * Fetch records by condition map.
   * @param info condition map
   * @returns {Promise<Array>}
   */
  async getRows (info) {
    return this.Model.getRows(info)
  }
  /**
   * Run a caller-built SQL statement through the model.
   * @param sql complete SQL string
   * @returns {Promise<*>}
   */
  async query (sql) {
    return this.Model.query(sql)
  }
}
module.exports = BaseAction
<file_sep>/app/controllers/responseFmt.js
/**
 * Response envelope middleware — register before any router.
 * After the downstream handlers run, whatever they left on the context is
 * wrapped into the {code, msg, data} envelope, checked in this order:
 * ctx.msg (business failure, 400) > ctx.data (success, 0) > ctx.auth
 * (authentication problem, 110). Responses that set an explicit content
 * type (e.g. rendered HTML) pass through untouched.
 */
const response_formatter = async (ctx, next) => {
  // Let the routes run first.
  await next();
  if (ctx.type) return;
  if (ctx.msg !== undefined) {
    ctx.body = { code: 400, msg: ctx.msg, data: false }
    return
  }
  if (ctx.data !== undefined) {
    ctx.body = { code: 0, msg: '', data: ctx.data }
    return
  }
  if (ctx.auth !== undefined) {
    ctx.body = { code: 110, msg: ctx.auth, data: false }
  }
}
module.exports = response_formatter
<file_sep>/APP.README.md
## 项目结构
* app [后台业务]
* action [控制层]
* common [公共]
* config [配置文件]
* helper [操作类库]
* controllers [装饰层]
* model [数据模型]
* routes [路由表]
* utils [工具]
* views [模版 - 暂时没用到]
* bin [运维]
* logs [日志]
* wapman.sql [数据库脚本]
* www [启动脚本]
* middleware [中间件]
* connect [koa混合中间件]
* database [数据连接中间件]
* root [根目录中间件 - 星空背景]
* router [路由绑定中间件]
* sess [会话中间件]
* public [静态目录]<file_sep>/app/controllers/decorator.js
const utils = require('../utils/index');
const crypto = require('../utils/crypto');
const { codeExpire, codeValid } = require('../common/config/server');
// Namespace object for the middleware decorators below.
function Decorator() {
}
/**
 * Login guard middleware.
 * Lets the request through only when a session user exists; otherwise sets
 * the auth message picked up by the response formatter.
 *
 * NOTE(review): the 'missed'/'expired' listeners are re-registered on the
 * shared app emitter for every request, which accumulates listeners over
 * time (potential EventEmitter leak) and, when the events finally fire,
 * writes to whatever ctx each closure captured. Confirm where these events
 * are emitted before restructuring.
 * @param ctx koa context
 * @param next downstream middleware
 */
Decorator.prototype.oAuth = async (ctx, next) => {
  ctx.app.on('missed', () => {
    ctx.auth = '验证失败,请登陆'
  });
  ctx.app.on('expired', () => {
    ctx.auth = '验证已过期,请重新登陆'
  });
  if(ctx.session.user) await next();
  else ctx.auth = '验证已失效,请重新登陆'
}
/**
 * Parameter-validation middleware.
 * Looks up the rule list for the current path in ./rules and checks each
 * declared parameter for presence, length bounds and pattern match.
 * Password-like fields (name contains "pass") are AES-decrypted before
 * validation. On the first violation the request is answered with the
 * rule's message; otherwise control passes downstream.
 * @param ctx koa context
 * @param next downstream middleware
 */
Decorator.prototype.invalid = async (ctx, next) => {
  const routeRules = require('./rules');
  const rules = routeRules[ctx.path]
  if (!utils.isArray(rules)) ctx.throw('rule must be array', 400)
  const passReg = new RegExp('pass')
  const params = ctx.request.method === 'GET' ? ctx.request.query : ctx.request.body
  let msg = ''
  for (const rule of rules) {
    const raw = params[rule.name]
    const param = passReg.test(rule.name) ? crypto.aesDecrypt(raw) : raw
    // Required parameter missing or empty.
    if (rule.require && (!param || utils.isEmpty(param))) {
      msg = `${rule.desc}不能为空`
      break
    }
    // Length bounds (only when a value is present).
    if (param && rule.max < param.length) {
      msg = `${rule.desc}不能多于${rule.max}位`
      break
    }
    if (param && rule.min > param.length) {
      msg = `${rule.desc}不能少于${rule.min}位`
      break
    }
    // Optional format pattern.
    if (rule.pattern && !new RegExp(rule.pattern).test(param)) {
      msg = `${rule.desc}不是有效的值`
      break
    }
  }
  if (msg) ctx.msg = msg
  else await next()
}
/**
 * SMS-code verification middleware.
 * The code for a phone number is cached in Redis with a TTL of `codeExpire`
 * minutes but is only usable during the first `codeValid` minutes, which is
 * checked via the remaining TTL (pttl, milliseconds). On success the cached
 * code is deleted and the chain continues; otherwise ctx.msg explains why.
 * @param ctx Koa context (expects { phone, code } in the request body)
 * @param next downstream middleware
 * @returns {Promise<any>}
 */
Decorator.prototype.checkCode = async (ctx, next) => {
  const RD = require('../common/helper/redis');
  const { phone, code } = ctx.request.body;
  // 手机号是否存在redis — has a code ever been sent to this number?
  const isPhone = await RD.exists(phone);
  if (!isPhone) {
    ctx.msg = '手机号还未发送验证码';
    return;
  }
  const val = await RD.get(phone);
  const index = await RD.pttl(phone);
  if (val === code) {
    // Remaining TTL must still be inside the validity window.
    if (index > ((codeExpire - codeValid) * 60000)) {
      RD.del(phone);
      await next();
    } else {
      ctx.msg = '验证码已过期';
      return;
    }
  } else {
    // BUG FIX: ctx.msg is a property, not a function — the old code called
    // ctx.msg('存在无过期的验证码') and would throw a TypeError at runtime.
    if (index === -1) ctx.msg = '存在无过期的验证码';
    else ctx.msg = '验证码不正确';
    return;
  }
}
// Export a singleton so every route shares the same decorator instance.
module.exports = new Decorator();
<file_sep>/app/controllers/cos.js
const COS = require('cos-nodejs-sdk-v5');
const { AppId, SecretId, SecretKey, Bucket, Region } = require('../common/config/cos')
const logger = require('./logger');
const date = require('../utils/date');
const utils = require('../utils/index');
/**
 * Thin wrapper around the Tencent Cloud COS SDK, bound to a single
 * bucket/region pair at construction time.
 */
class Cos {
  constructor(AppId, SecretId, SecretKey, Bucket, Region) {
    this.Bucket = Bucket;
    this.Region = Region;
    this.cos = new COS({ AppId, SecretId, SecretKey });
  }

  /**
   * Uploads an avatar image stream as /avatar/<timestamp><random>.png.
   * Resolves with the SDK response data; logs and rejects on failure.
   */
  async uploadImg(stream) {
    const prefix = '/avatar';
    const name = date.nowToName() + utils.randomString(8);
    return new Promise((resolve, reject) => {
      const request = {
        Bucket: this.Bucket,
        Region: this.Region,
        Key: `${prefix}/${name}.png`,
        Body: stream
      };
      this.cos.putObject(request, (err, data) => {
        if (err) {
          logger.error(`腾讯云存储上传头像错误-${JSON.stringify(err)}`)
          reject(err);
          return;
        }
        logger.console(`腾讯云存储上传头像成功${prefix}/${name}.png`)
        resolve(data);
      });
    });
  }
}

module.exports = new Cos(AppId, SecretId, SecretKey, Bucket, Region);
<file_sep>/app/routes/dept.js
const router = require('koa-router')();
const action = require('../action/dept.action');
const logger = require('../controllers/logger');
const c = require('../controllers/decorator');
const DateFmt = require('../utils/date');
// All routes below are mounted under /dept.
router.prefix('/dept');

// GET /dept — returns the full department tree (requires login).
router.get('/', c.oAuth, async (ctx, next) => {
  const treeDept = await action.getTreeDept();
  if (!treeDept) ctx.throw('获取树形分组失败', 400);
  ctx.data = treeDept;
});
// GET /dept/:id — returns one department record, or a not-found message.
router.get('/:id', c.oAuth, async (ctx, next) => {
  const info = await action.getInfo(ctx.params.id);
  if (info) ctx.data = info;
  else ctx.msg = '未找到分组信息';
});
// POST /dept/add — creates a department one tree level below its parent.
router.post('/add', c.oAuth, c.invalid, async (ctx, next) => {
  const { pid, name, status } = ctx.request.body;
  // Children of the root start from parent level 0; otherwise look it up.
  const parentLevel = parseInt(pid) === 0 ? 0 : await action.getDeptLevel(pid);
  const dept = await action.add({ pid, name, level: parentLevel + 1, status });
  if (!dept.insertId) ctx.throw('分组添加失败', 400);
  logger.console(`分组-${name}-添加成功:id为${dept.insertId}`);
  ctx.data = DateFmt.now();
});
// DELETE /dept/del/:id — deletes a leaf department; refuses when the record
// is missing or still has sub-departments.
router.delete('/del/:id', c.oAuth, async (ctx, next) => {
  const { id } = ctx.params;
  const info = await action.getInfo(id);
  const children = await action.getChild(id);
  // Guard clauses: record must exist and must be a leaf.
  if (!info) {
    ctx.msg = '没有ID分组信息';
    return
  }
  if (children.length) {
    ctx.msg = '分组下有分组,不能删除';
    return
  }
  const rs = await action.del(id);
  if (rs.affectedRows !== 1) ctx.throw('分组删除错误', 400);
  ctx.data = DateFmt.now();
});
// PATCH /dept/edit — updates a department, recomputing its tree level from
// the (possibly new) parent.
router.patch('/edit', c.oAuth, c.invalid, async (ctx, next) => {
  const { id, pid, name, status } = ctx.request.body;
  const parentLevel = parseInt(pid) === 0 ? 0 : await action.getDeptLevel(pid);
  const rs = await action.edit(id, { pid, name, level: parentLevel + 1, status });
  if (rs.affectedRows !== 1) ctx.throw('分组更新失败', 400);
  ctx.data = DateFmt.now();
});

module.exports = router;
<file_sep>/app/common/config/cos.js
// Tencent COS credentials. prod and dev currently share the same
// account/bucket; they are kept as separate objects so they can diverge.
const credentials = {
  AppId: 1252359508,
  SecretId: '<KEY>',
  SecretKey: '<KEY>',
  Bucket: 'wapman',
  Region: 'ap-shanghai'
};
const prod = Object.assign({}, credentials);
const dev = Object.assign({}, credentials);
module.exports = process.env.NODE_ENV === 'production' ? prod : dev
|
ed704f413e4d86edef81ed0d83368b6a52e926e1
|
[
"JavaScript",
"SQL",
"Markdown"
] | 39 |
JavaScript
|
xiejunping/wapman-admin
|
cbb5a97ba15ab3bdb4af45b868e59546f347a321
|
9c29f07a429459a9116f29e6d8f12202a67fa421
|
refs/heads/master
|
<repo_name>TajinderSingh29/ELF-06june19-Covalense-TajinderSandhu<file_sep>/CoreJava/src/com/covalense/coreJavaApp/casting/Bus.java
package com.covalense.corejavaapp.casting;
// Demo base class for the casting examples: a plain bus.
public class Bus {
	// Vehicle model number.
	int model;
	// Prints a marker showing the base-class move() ran.
	void move() {
		System.out.println("move() method is call");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/Product.java
package com.covalense.corejavaapp.collection;
// A product whose natural order is by rating, highest first.
public class Product implements Comparable<Product> {
	int cost;
	String name;
	double rating;

	/**
	 * Descending comparison by rating, so sorting puts the best-rated
	 * product first. Uses Double.compare with swapped arguments instead of
	 * boxing two Doubles and negating the result — same ordering, no
	 * allocation, no fragile unary minus on a compare result.
	 */
	@Override
	public int compareTo(Product o) {
		return Double.compare(o.rating, this.rating);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/inheritance/TestLevel.java
package com.covalense.corejavaapp.inheritance;
// Driver demonstrating a multi-level inheritance chain (Level12 -> Level3)
// and a separate LevelInher class.
public class TestLevel {
	public static void main(String[] args) {
		// Subclass reference: all inherited level methods are callable.
		Level3 l1=new Level3();
		l1.level1();
		l1.level2();
		l1.level3();
		System.out.println(" ");
		// Supertype reference to a Level3 instance: only the supertype's
		// methods are visible at compile time.
		Level12 l2=new Level3();
		l2.level1();
		l2.level2();
		System.out.println(" ");
		LevelInher l3=new LevelInher();
		l3.level1();
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Level2C.java
// Inheritance-level demo: a car variant reporting its fuel load.
public class Level2C extends CarFuel{
	// Prints this variant's fuel capacity.
	void Fuel()
	{
		System.out.println("Car with 500L fuel");
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/StrawberryIceCream.java
// Concrete IceCream flavour used in the baby/ice-cream polymorphism demo.
public class StrawberryIceCream extends IceCream {
	// Flavour-specific eat() implementation.
	void eat() {
		System.out.println("Strawberry IceCream for baby ");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestVector.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.Vector;
// Demonstrates the different ways to traverse a Vector: toString, indexed
// for loop, for-each, Iterator, ListIterator (both directions), Enumeration.
public class TestVector {
	public static void main(String[] args) {
		Vector<Integer> v=new Vector<Integer>();
		v.add(15);
		v.add(20);
		v.add(25);
		v.add(5);
		v.add(1);
		System.out.println("*************Simple s.o.p**************");
		// Vector.toString prints all elements in insertion order.
		System.out.println(v);
		System.out.println("************For Loop***************");
		for (int i = 0; i < v.size(); i++) {
			Integer r = v.get(i);
			System.out.println(r);
		}
		System.out.println("*************For each**************");
		for (Object t : v) {
			System.out.println(t);
		}
		System.out.println("**************Iterator*************");
		Iterator<Integer> it = v.iterator();
		while (it.hasNext()) {
			Object r = it.next();
			System.out.println(r);
		}
		System.out.println("**************ListIterator*************");
		System.out.println("*************ForWard**************");
		ListIterator<Integer> lit=v.listIterator();
		while (lit.hasNext()) {
			Object r = lit.next();
			System.out.println(r);
		}
		System.out.println("************Backward***************");
		// Same ListIterator, now walked back to the start.
		while (lit.hasPrevious()) {
			Object r = lit.previous();
			System.out.println(r);
		}
		System.out.println("************Enumeration***************");
		// Legacy Enumeration API, kept for pre-collections compatibility.
		Enumeration<Integer> e=v.elements();
		while (e.hasMoreElements()) {
			Integer k= e.nextElement();
			System.out.println(k);
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/WaterBottle.java
package com.covalense.corejavaapp.passobject;
// Simple demo object used in the pass-object examples.
public class WaterBottle {
	// Prints the open-and-drink action.
	void open()
	{
		System.out.println("open and drink a water");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/Pen.java
package com.covalense.corejavaapp.passobject;
// Demo object with two actions, used in the pass-object examples.
public class Pen {
	void open()
	{
		System.out.println("open A pen");
	}
	void write()
	{
		System.out.println("write with a pen");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/TestEmployee2.java
package com.covalense.corejavaapp.javabean;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.Logger;
// Configures java.util.logging by hand and prints an EmployeeBean2's fields.
public class TestEmployee2 {
	private static final Logger loger = Logger.getLogger("MainMethod");
	public static void main(String[] args) {
		// Drop the default root-logger handlers so only ours is attached.
		LogManager.getLogManager().reset();
		loger.setLevel(Level.SEVERE);
		ConsoleHandler ch = new ConsoleHandler();
		ch.setLevel(Level.SEVERE);
		// NOTE(review): this second setLevel call is redundant — the same
		// level was already set three lines above.
		loger.setLevel(Level.SEVERE);
		loger.addHandler(ch);
		EmployeeBean2 eb = new EmployeeBean2();
		eb.setAge(25);
		eb.setDept("Software");
		eb.setGender("Male");
		eb.setId(1);
		eb.setName("Sonu");
		eb.setSalary(20000);
		loger.log(Level.SEVERE, "Name is " + eb.getName());
		loger.log(Level.SEVERE, "Age is " + eb.getAge());
		loger.log(Level.SEVERE, "Dept is " + eb.getDept());
		loger.log(Level.SEVERE, "Id is " + eb.getId());
		loger.log(Level.SEVERE, "Gender is " + eb.getGender());
		loger.log(Level.SEVERE, "Salary is " + eb.getSalary());
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/colon/methodreferenceapp/Addnumber.java
package com.covalense.corejavaapp.colon.methodreferenceapp;
import lombok.extern.java.Log;
// Demonstrates a static method reference (Demo::sum) bound to the
// functional interface MyMath.
@Log
public class Addnumber {
	public static void main(String[] args) {
		// Method reference: a.add(...) delegates to Demo.sum(...).
		MyMath a = Demo::sum;
		int i = a.add(10, 5);
		log.info("Sum is " + i);
	}
}
<file_sep>/Corejava Assignment/Assignment5/src/com/covalence/filehandlingapp/assignment/StudentData.java
package com.covalence.filehandlingapp.assignment;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import lombok.extern.java.Log;
@Log
public class StudentData {

	/**
	 * Serializes one StudentBean to "student.txt".
	 * Rewritten with try-with-resources so both streams are always closed —
	 * the old finally block only closed the ObjectOutputStream and leaked
	 * the FileOutputStream whenever the ObjectOutputStream constructor
	 * itself threw. FileNotFoundException is an IOException subclass, so a
	 * single catch preserves the old printStackTrace behaviour.
	 * NOTE(review): the file is opened in append mode; appending a second
	 * ObjectOutputStream to an existing serialization file writes a second
	 * stream header, which a single ObjectInputStream cannot read past —
	 * verify append is really intended.
	 */
	public static void main(String[] args) {
		StudentBean studentBean = new StudentBean();
		studentBean.setAge(22);
		studentBean.setName("sonu");
		studentBean.setPercentage(90.82);
		try (FileOutputStream fos = new FileOutputStream("student.txt", true);
				ObjectOutputStream obj = new ObjectOutputStream(fos)) {
			obj.writeObject(studentBean);
			log.info("Done");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/inheritance/Levels3.java
package com.covalense.corejavaapp.inheritance;
// Overrides speed() from Level12 with a higher limit.
public class Levels3 extends Level12 {
	@Override
	public void speed()
	{
		System.out.println("The Speed Overlimits 150 ");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestTreeSet.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.Iterator;
import java.util.TreeSet;
// Demonstrates TreeSet iteration; elements always come back sorted ascending.
public class TestTreeSet {
	public static void main(String[] args) {
		// Typed as TreeSet<Double> instead of the raw type — removes the
		// unchecked-call warnings without changing the printed output.
		TreeSet<Double> ts = new TreeSet<>();
		ts.add(1.5);
		ts.add(2.6);
		ts.add(3.9);
		ts.add(3.8);
		System.out.println(ts);
		System.out.println("******************");
		// Enhanced for loop over the sorted set.
		for (Double t : ts) {
			System.out.println(t);
		}
		System.out.println("******************");
		// Explicit iterator; same traversal order.
		Iterator<Double> it = ts.iterator();
		while (it.hasNext()) {
			Double r = it.next();
			System.out.println(r);
		}
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Car.java
// Base car behaviour for the Audi/Benz driving demo.
public class Car {
	void Gear() {
		System.out.println("Car Have a gear");
	}
	void break1() {
		System.out.println("Car break a gear");
	}
}
<file_sep>/Corejava Assignment/Assignment7/src/com/covalence/expressionapp/TestSquare.java
package com.covalence.expressionapp;
// Exercises the Square functional interface with a lambda.
public class TestSquare {
	public static void main(String[] args) {
		// Expression lambda instead of a block body with an explicit return.
		Square s = a -> a * a;
		int result = s.squarecal(5);
		System.out.println("Square is " + result);
	}
}
<file_sep>/junit/src/main/java/com/covalense/junit/StringOpp.java
package com.covalense.junit;
// Small string utility exercised by the JUnit examples.
public class StringOpp {
	/**
	 * Returns the number of characters in the given string.
	 * NOTE(review): throws NullPointerException for a null argument —
	 * confirm callers never pass null.
	 */
	public int countCharInString(String name) {
		return name.length();
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/stream/StudentTest.java
package com.covalense.corejavaapp.stream;
import java.util.ArrayList;
import java.util.Comparator;
import lombok.extern.java.Log;
@Log
// Finds the best- and worst-performing student in a list using streams.
public class StudentTest {

	// Ascending comparator on percentage. Double.compare replaces the
	// hand-rolled three-way if/else — identical ordering, less code.
	static Comparator<Student> c = (i, j) -> Double.compare(i.percentage, j.percentage);

	// Highest percentage. The list must be non-empty: Optional.get() throws
	// NoSuchElementException on an empty stream.
	static Student getTopper(ArrayList<Student> al) {
		return al.stream().max(c).get();
	}

	// Lowest percentage; same non-empty requirement.
	static Student getleast(ArrayList<Student> al) {
		return al.stream().min(c).get();
	}

	public static void main(String[] args) {
		ArrayList<Student> al = new ArrayList<Student>();
		Student s1 = new Student("Priya", 65.28);
		Student s2 = new Student("neha", 78.25);
		Student s3 = new Student("trisha", 88.80);
		Student s4 = new Student("aman", 55.25);
		al.add(s1);
		al.add(s2);
		al.add(s3);
		al.add(s4);
		Student s = getTopper(al);
		log.info("Topper is " + s.name + " " + s.percentage);
		Student l = getleast(al);
		log.info("Least is " + l.name + " " + l.percentage);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/stream/Student.java
package com.covalense.corejavaapp.stream;
// Simple value holder for the stream min/max demo.
public class Student {
	String name;
	double percentage;

	public Student(String name, double percentage) {
		this.name = name;
		this.percentage = percentage;
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/interfaceTopic/Animal.java
package com.covalense.corejavaapp.interfacetopic;
// Contract: every animal implementation must provide eat().
public interface Animal {
	void eat();
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestArrayListE.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
import java.util.ListIterator;
// Shows an ArrayList holding mixed element types and its toString output.
public class TestArrayListE {
	public static void main(String[] args) {
		// Typed as ArrayList<Object> (the elements are deliberately mixed)
		// instead of the raw type, silencing unchecked warnings with the
		// exact same output.
		ArrayList<Object> al = new ArrayList<>();
		al.add(15);
		al.add(9.2);
		al.add('A');
		al.add("Riya");
		al.add(105);
		al.add(91.2);
		System.out.println(al);
	}
}
<file_sep>/AssesmentProgram/src/com/covalense/corejava/assesmenttest/ques4/EligibilityLessException.java
package com.covalense.corejava.assesmenttest.ques4;
// Unchecked exception thrown when a student's marks are below the
// exam-eligibility threshold (see ValidateException.validate).
public class EligibilityLessException extends RuntimeException {
	// Detail text; stored locally and returned from getMessage/toString
	// instead of being passed to the RuntimeException constructor.
	private String message;

	public EligibilityLessException(String message) {
		this.message = message;
	}

	@Override
	public String getMessage() {
		return message;
	}

	@Override
	public String toString() {
		return message;
	}
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/exceptionapp/assignment/CheckedExceptionTest.java
package com.covalense.exceptionapp.assignment;
import lombok.extern.java.Log;
@Log
// Shows catching a custom exception raised by ValidaterException.validate.
public class CheckedExceptionTest {
	public static void main(String[] args) {
		log.info("Main Started");
		ValidaterException validaterException = new ValidaterException();
		try {
			// 0 is an invalid id here, so validate is expected to throw.
			validaterException.validate(0);
		} catch (IdCheckedException e) {
			log.info(e.getMessage() + "");
		}
		log.info("Main Ended");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/anotation/Animal.java
package com.covalense.corejavaapp.anotation;
// Base class for the annotation/override examples.
public class Animal {
	// Prints which eat() implementation ran.
	void eat() {
		System.out.println("Animal eat() method is call");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestArrayListB.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
import java.util.Iterator;
// Iterates a mixed-type ArrayList with an explicit Iterator.
public class TestArrayListB {
	public static void main(String[] args) {
		// ArrayList<Object> instead of the raw type (elements are mixed);
		// output is unchanged, unchecked warnings are gone.
		ArrayList<Object> al = new ArrayList<>();
		al.add(15);
		al.add(9.2);
		al.add('A');
		al.add("Riya");
		al.add(105);
		al.add(91.2);
		Iterator<Object> it = al.iterator();
		while (it.hasNext()) {
			Object r = it.next();
			System.out.println(r);
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/stream/ArrayTest.java
package com.covalense.corejavaapp.stream;
import java.util.stream.Stream;
import lombok.extern.java.Log;
@Log
// Builds Streams from an existing array and from varargs, logging elements.
public class ArrayTest {
	public static void main(String[] args) {
		Integer a[] = { 5, 6, 4, 1, 2 };
		// Stream over an existing array; a stream can be consumed only once.
		Stream<Integer> s = Stream.of(a);
		s.forEach(i -> log.info("" + i));
		// Stream built directly from varargs values.
		Stream<Integer> s1 = Stream.of(9, 10, 5, 6, 7, 8, 9, 5);
		s1.forEach(i -> log.info("" + i));
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/inheritance/Animal.java
package com.covalense.corejavaapp.inheritance;
// Base class: every animal can eat.
public class Animal
{
	void eat() {
		System.out.println("Eat What they want");
	}
}
// Tiger adds running behaviour on top of Animal.
class Tiger extends Animal
{
	void Run() {
		System.out.println("Run");
	}
}
// Cow adds milking behaviour on top of Animal.
class Cow extends Animal
{
	void milk() {
		System.out.println("milk");
	}
}
<file_sep>/Corejava Assignment/Assignment7/src/com/covalence/expressionapp/Square.java
package com.covalence.expressionapp;
// Functional interface: computes the square of an int (see TestSquare).
public interface Square {
	int squarecal(int a);
}
<file_sep>/AssesmentProgram/src/com/covalense/corejava/assesmenttest/ques2/SimpleInterestCalculate.java
package com.covalense.corejava.assesmenttest.ques2;
// Functional interface for a simple-interest computation.
// NOTE(review): parameter meanings are presumably principal / rate / time —
// confirm against the implementing lambda.
public interface SimpleInterestCalculate {
	double simpleInterest(int p, double r, double i);
}
<file_sep>/Corejava Assignment/Assignment5/src/com/covalence/filehandlingapp/assignment/ReadDataStudent.java
package com.covalence.filehandlingapp.assignment;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import lombok.extern.java.Log;
@Log
public class ReadDataStudent {

	/**
	 * Reads one serialized StudentBean back from "student.txt" and logs its
	 * fields. Rewritten with try-with-resources so both streams are always
	 * closed — the old finally block only closed the ObjectInputStream and
	 * leaked the FileInputStream when the ObjectInputStream constructor
	 * threw. FileNotFoundException is an IOException subclass, so a single
	 * IOException catch preserves the old printStackTrace behaviour.
	 */
	public static void main(String[] args) {
		try (FileInputStream fis = new FileInputStream("student.txt");
				ObjectInputStream ois = new ObjectInputStream(fis)) {
			StudentBean studentBean = (StudentBean) ois.readObject();
			log.info("Age is " + studentBean.getAge());
			log.info("Name is " + studentBean.getName());
			log.info("percentage is " + studentBean.getPercentage());
		} catch (IOException e) {
			e.printStackTrace();
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		}
	}
}
<file_sep>/Corejava Assignment/Assignment7/src/com/covalence/expressionapp/TestSumExp.java
package com.covalence.expressionapp;
import lombok.extern.java.Log;
@Log
// Exercises the Sum functional interface with an expression lambda.
public class TestSumExp {
	public static void main(String[] args) {
		Sum s=(a,b)-> a+b;
		int i=s.add(5, 6);
		log.info("Sum is "+i);
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Baby.java
// Demo: a baby eats whatever IceCream it is handed (runtime polymorphism).
public class Baby {
	// Dispatches to the concrete flavour's eat() implementation.
	void take(IceCream ic)
	{
		ic.eat();
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/jodadatetime/LocalTimeest.java
package com.covalense.corejavaapp.jodadatetime;
import java.time.LocalTime;
import lombok.extern.java.Log;
@Log
// Demo of java.time.LocalTime accessors and arithmetic.
public class LocalTimeest {
	public static void main(String[] args) {
		// Current wall-clock time in the system default time zone.
		LocalTime a = LocalTime.now();
		log.info("" + a);
		log.info("" + a.getHour());
		log.info("" + a.getMinute());
		log.info("" + a.getSecond());
		log.info("" + a.getNano());
		// minusHours returns a NEW LocalTime; 'a' itself is immutable.
		log.info("" + a.minusHours(2));
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Driver.java
// Identifies which concrete Car subtype it was handed via instanceof.
public class Driver {
	// NOTE(review): a Car that is neither an Audi nor a Benz prints nothing.
	void driving(Car c) {
		if(c instanceof Audi)
		{
			System.out.println("Driver is driving a Audi");
		}
		else if(c instanceof Benz)
		{
			System.out.println("Driver is driving a Benz");
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/DB4.java
package com.covalense.corejavaapp.javabean;
// Pretend persistence layer: prints the Employee fields it receives.
public class DB4 {
	void receive(Employee e) {
		System.out.println("**********DB4*************");
		System.out.println(e.getName());
		System.out.println(e.getId());
	}
}
<file_sep>/AssesmentProgram/src/com/covalense/corejava/assesmenttest/ques4/ValidateException.java
package com.covalense.corejava.assesmenttest.ques4;
import lombok.extern.java.Log;
@Log
// Throws EligibilityLessException when marks are too low for the exam.
public class ValidateException {
	// Eligibility requires strictly more than 40 marks — a score of exactly
	// 40 is rejected (assumed intended; confirm the pass mark).
	void validate(int marks) {
		if (marks > 40) {
			log.info("student eligibility for the exam");
		} else {
			throw new EligibilityLessException("student not eligibility for the exam");
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expection/Validater.java
package com.covalense.corejavaapp.expection;
// Validates a name's length, throwing a custom unchecked exception when it
// is shorter than four characters.
public class Validater {
	void validate(String name) {
		if(name.length()>=4)
		{
			System.out.println("Name is validate");
		}
		else {
			throw new NameCharException("Your is not validate because name letter should greater than Four letter");
		}
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Receiver.java
// Thanks the giver according to the concrete Gift subtype received.
public class Receiver {
	// NOTE(review): a Gift that is neither a MoblieGift nor a Watch is
	// silently ignored.
	void receive(Gift g)
	{
		if (g instanceof MoblieGift)
		{
			System.out.println("Thanks for Moblie");
		}
		else if (g instanceof Watch)
		{
			System.out.println("Thanks for Watch");
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/TestArrayListProduct.java
package com.covalense.corejavaapp.collection;
import java.util.ArrayList;
import java.util.Collections;
// Builds four products, sorts them by Product's natural order (descending
// rating — see Product.compareTo) and prints them.
public class TestArrayListProduct {
	public static void main(String[] args) {
		ArrayList<Product> al = new ArrayList<Product>();
		Product p1 = new Product();
		p1.name = "Iphone";
		p1.cost = 150000;
		p1.rating = 6.5;
		Product p2 = new Product();
		p2.name = "Bike";
		p2.cost = 1500000;
		p2.rating = 7.5;
		Product p3 = new Product();
		p3.name = "Mi";
		p3.cost = 15000;
		p3.rating = 5.5;
		Product p4 = new Product();
		p4.name = "Car";
		p4.cost = 15000000;
		p4.rating = 8.5;
		al.add(p1);
		al.add(p2);
		al.add(p3);
		al.add(p4);
		// Uses Product.compareTo (Comparable) via Collections.sort.
		Collections.sort(al);
		for (Product p : al) {
			System.out.println("name is " +p.name);
			System.out.println("cost is " + p.cost);
			System.out.println("rating is " + p.rating);
			System.out.println("****************");
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/overLoading/Iphone.java
package com.covalense.corejavaapp.overloading;
// Concrete Phone: iPhone-specific call and music behaviour.
public class Iphone extends Phone {
	void call()
	{
		System.out.println("Calls with iphone");
	}
	void music()
	{
		System.out.println("music listen with iphone");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/finalTopic/Pen.java
package com.covalense.corejavaapp.finaltopic;
// Demonstrates a final instance field.
public class Pen {
	// final: assigned once here and never reassignable.
	final int i=10;
	// Empty placeholder method.
	void m()
	{
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructorrefernce/ProductTest.java
package com.covalense.corejavaapp.constructorrefernce;
import lombok.extern.java.Log;
@Log
// Demonstrates a constructor reference (Product::new) bound to the
// MyProduct factory interface.
public class ProductTest {
	public static void main(String[] args) {
		// Constructor reference: getproduct(...) invokes new Product(...).
		MyProduct mp = Product::new;
		Product p = mp.getproduct("IPHONE", 800000);
		log.info("Name is " + p.name);
		log.info("Cost is " + p.cost);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/GDrive.java
package com.covalense.corejavaapp.abstraction;
// Concrete Google service in the abstraction demo: Drive document sharing.
public class GDrive extends Google {
	void shareDocumen() {
		System.out.println("Share A document Upto 5GB");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expressionapp/CricleCalFunctionTest.java
package com.covalense.corejavaapp.expressionapp;
import java.util.function.Function;
import lombok.extern.java.Log;
@Log
// Computes a circle's area with a java.util.function.Function lambda.
public class CricleCalFunctionTest {
	public static void main(String[] args) {
		// Area = pi * r^2, with pi approximated as 3.142 (not Math.PI, so
		// the result differs slightly from the exact value).
		Function<Double, Double> f = r -> 3.142 * r * r;
		double x = f.apply(5.2);
		log.info(x + "");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expressionapp/Factorial.java
package com.covalense.corejavaapp.expressionapp;
// Functional interface: computes the factorial of an int.
public interface Factorial {
	int fact(int a);
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/array/MyArrayClassTest.java
package com.covalense.corejavaapp.array;
import lombok.extern.java.Log;
@Log
// Driver (with crude timing) for the custom MyArrayClass growable array.
public class MyArrayClassTest {
	public static void main(String[] args) {
		long startTime = System.currentTimeMillis();
		MyArrayClass myArrayClass = new MyArrayClass();
		int dataSize = 12;
		// 12 adds force at least one internal resize (default capacity 10).
		for (int i = 0; i < dataSize; i++) {
			myArrayClass.add("value "+ i);
		}
		log.info("Removing 3rd Position");
		/* myArrayClass.remove(3); */
		for (int i = 0; i <dataSize; i++) {
			log.info("Output : " + myArrayClass.get(i));
		}
		long endTime = System.currentTimeMillis();
		log.info("Total Execution time -: " + (endTime - startTime) + "ms");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestArrayListC.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.ListIterator;
// Walks a mixed-type ArrayList forwards and backwards with a ListIterator.
public class TestArrayListC {
	public static void main(String[] args) {
		// ArrayList<Object> instead of the raw type (elements are mixed);
		// output is unchanged, unchecked warnings are gone.
		ArrayList<Object> al = new ArrayList<>();
		al.add(15);
		al.add(9.2);
		al.add('A');
		al.add("Riya");
		al.add(105);
		al.add(91.2);
		ListIterator<Object> lit = al.listIterator();
		System.out.println("----ForWord");
		while (lit.hasNext()) {
			Object r = lit.next();
			System.out.println(r);
		}
		System.out.println("----Backward");
		// Same ListIterator, walked back to the start.
		while (lit.hasPrevious()) {
			Object r = lit.previous();
			System.out.println(r);
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/inheritance/Calculator.java
package com.covalense.corejavaapp.inheritance;
// Basic calculator base class used in the inheritance examples.
public class Calculator
{
	void add()
	{
		System.out.println("Add");
	}
	void sub()
	{
		System.out.println("sub");
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/casting/TestB.java
package com.covalense.corejavaapp.casting;
// Downcasting demo: a Bus reference holding a Van is cast back to Van.
public class TestB {
	public static void main(String[] args) {
		// Upcast: a Van instance stored in a Bus variable.
		Bus b = new Van();
		// Explicit downcast is safe because b really refers to a Van.
		Van v = (Van) b;
		// Note: println(v.model = 3215) assigns the field AND prints it.
		System.out.println(v.model = 3215);
		v.move();
		System.out.println(v.seats = 50);
		v.turn();
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/basicProgram/Seinor.java
package com.covalense.corejavaapp.basicprogram;
// Applies a senior-citizen discount to a ticket price when age >= 60.
public class Seinor
{
	public static void main(String[] args) {
		// Demo data: a 50-year-old passenger, so no discount applies here.
		int age = 50;
		double price = 700;
		double discount = 15.2;
		boolean isSenior = age >= 60;
		if (isSenior)
		{
			price -= (price * discount) / 100;
			System.out.println(price);
		}
		System.out.println(" your Ticket price is" + price);
	}
}
<file_sep>/Corejava Assignment/Assignment4/src/com/covalense/stringapp/assigment/StudentArray.java
package com.covalense.stringapp.assigment;
import java.util.logging.Logger;
// Fills an Object[] with four StudentBean instances and logs each one.
public class StudentArray {
	private final static Logger loger = Logger.getLogger("Output");
	public static void main(String[] args) {
		Object[] objects = new Object[4];
		StudentBean studentBean = new StudentBean();
		studentBean.setName("Priya");
		studentBean.setAge(20);
		studentBean.setPercentage(68.65);
		StudentBean studentBean1 = new StudentBean();
		studentBean1.setName("Riya");
		studentBean1.setAge(22);
		studentBean1.setPercentage(78.65);
		StudentBean studentBean2 = new StudentBean();
		studentBean2.setName("Trisha");
		studentBean2.setAge(24);
		studentBean2.setPercentage(88.65);
		StudentBean studentBean3 = new StudentBean();
		studentBean3.setName("Manpreet");
		studentBean3.setAge(26);
		studentBean3.setPercentage(98.65);
		objects[0] = studentBean;
		objects[1] = studentBean1;
		objects[2] = studentBean2;
		objects[3] = studentBean3;
		for (int i = 0; i < objects.length; i++) {
			// Logs objects[i].toString() — unless StudentBean overrides
			// toString this prints ClassName@hash, not the field values.
			loger.info(objects[i]+"");
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/overLoading/MiPhone.java
package com.covalense.corejavaapp.overloading;
// Concrete Phone: Mi-specific call and music behaviour.
public class MiPhone extends Phone{
	void call()
	{
		System.out.println("Calls with miphone");
	}
	void music()
	{
		System.out.println("music listen with miphone");
	}
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/collectionapp/assignment/EmployeeSortByName.java
package com.covalense.collectionapp.assignment;
import java.util.Comparator;
// Comparator ordering EmployeeSort records alphabetically by name.
public class EmployeeSortByName implements Comparator<EmployeeSort> {
	// Delegates to String.compareTo (case-sensitive, lexicographic).
	@Override
	public int compare(EmployeeSort e1, EmployeeSort e2) {
		return e1.name.compareTo(e2.name);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/implementlambda/LoanTest.java
package com.covalense.corejavaapp.implementlambda;
import lombok.extern.java.Log;
@Log
// Reads the constant value attached to the Loan.PERSONAL enum member.
public class LoanTest {
	public static void main(String[] args) {
		Loan l = Loan.PERSONAL;
		int constvalue = l.getValue();
		log.info(" Value for personal loan "+constvalue);
	}
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/collectionapp/assignment/EmployeeSortById.java
package com.covalense.collectionapp.assignment;
import java.util.Comparator;
// Comparator ordering EmployeeSort records by ascending numeric id.
public class EmployeeSortById implements Comparator<EmployeeSort> {
	/**
	 * Integer.compare replaces the hand-rolled three-way if/else; it yields
	 * the same negative/zero/positive ordering with no branching to get
	 * wrong.
	 */
	@Override
	public int compare(EmployeeSort e1, EmployeeSort e2) {
		return Integer.compare(e1.id, e2.id);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/Chips.java
package com.covalense.corejavaapp.passobject;
// Snack object handed to Baby in the pass-object demo.
public class Chips {
	void open()
	{
		System.out.println("open A Chip");
	}
	void eat()
	{
		System.out.println("Eat a Chips");
	}
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/collectionapp/assignment/EmployeeTest.java
package com.covalense.collectionapp.assignment;
import java.util.ArrayList;
import lombok.extern.java.Log;
@Log
// Builds four Employee objects, stores them in an ArrayList and logs them
// in insertion order.
public class EmployeeTest {
	public static void main(String[] args) {
		Employee employee1 = new Employee();
		employee1.name = "Divya";
		employee1.id = 5;
		employee1.salary = 25000;
		Employee employee2 = new Employee();
		employee2.name = "Trsiha";
		employee2.id = 2;
		employee2.salary = 35000;
		Employee employee3 = new Employee();
		employee3.name = "Priya";
		employee3.id = 4;
		employee3.salary = 15000;
		Employee employee4 = new Employee();
		employee4.name = "Riya";
		employee4.id = 3;
		employee4.salary = 45000;
		ArrayList<Employee> arrayList = new ArrayList<Employee>();
		arrayList.add(employee1);
		arrayList.add(employee2);
		arrayList.add(employee3);
		arrayList.add(employee4);
		for (Employee employee : arrayList) {
			log.info("Name is " + employee.name);
			log.info("Id is " + employee.id);
			log.info("Salary is " + employee.salary);
		}
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/DB2.java
package com.covalense.corejavaapp.javabean;
// Pretend persistence layer: prints the Person fields it receives.
public class DB2 {
	void receive(Person p) {
		System.out.println("**********DB2*************");
		System.out.println(p.getName());
		System.out.println(p.getAge());
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/Baby.java
package com.covalense.corejavaapp.passobject;
// Baby opens and eats whatever Chips object it is handed.
public class Baby {
	void take(Chips s)
	{
		s.open();
		s.eat();
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/array/MyArrayClass.java
package com.covalense.corejavaapp.array;
// A minimal growable array (ArrayList-like) backed by an Object[].
public class MyArrayClass {

	// Backing store; grows by doubling when full.
	private Object[] myArray;
	// Number of stored elements (also the next free slot).
	private int index;

	/** Creates a list with the default capacity of 10. */
	public MyArrayClass() {
		this(10);
	}

	/**
	 * Creates a list with the given initial capacity.
	 * @throws ArrayIndexOutOfBoundsException if size is not positive
	 */
	public MyArrayClass(int size) {
		if (size <= 0) {
			throw new ArrayIndexOutOfBoundsException("Size should be greater than zero !!!");
		}
		myArray = new Object[size];
		index = 0;
	}// End of Constructor

	/** Appends a value, doubling the backing array when it is full. */
	public void add(Object val) {
		if (index >= myArray.length) {
			Object[] myArrayNew = new Object[myArray.length * 2];
			System.arraycopy(myArray, 0, myArrayNew, 0, myArray.length);
			myArray = myArrayNew;
		}
		myArray[index] = val;
		index++;
	}

	/** Returns the element at the given position. */
	public Object get(int position) {
		return myArray[position];
	}

	/**
	 * Removes the element at the given position, shifting later elements
	 * left. Fixes: validate against the element count instead of the raw
	 * capacity (old check used position > myArray.length), shrink the count,
	 * and clear the vacated slot — the old code left the size unchanged and
	 * a stale duplicate reference in the last slot.
	 * @throws ArrayIndexOutOfBoundsException if position is out of range
	 */
	public void remove(int position) {
		if (position < 0 || position >= index) {
			throw new ArrayIndexOutOfBoundsException("position is not present in the array !!!");
		}
		System.arraycopy(myArray, position + 1, myArray, position, index - 1 - position);
		index--;
		myArray[index] = null; // drop the stale reference so it can be GC'd
	}

	/**
	 * Returns the number of stored elements. Fixed off-by-one: the old
	 * version returned index - 1 (e.g. 2 after three adds).
	 */
	public int getSize() {
		return index;
	}
}// End of Class
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestGenericArrayList.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
// Prints every element of a typed ArrayList<Double> in insertion order.
public class TestGenericArrayList {
	public static void main(String[] args) {
		ArrayList<Double> values = new ArrayList<Double>();
		values.add(9.2);
		values.add(2.0);
		values.add(23.0);
		values.add(25.6);
		// Enhanced for loop instead of index-based get(); same order, same
		// output.
		for (Double value : values) {
			System.out.println(value);
		}
	}
}
<file_sep>/Corejava Assignment/Assignment3/src/Level1.java
// First upgrade level: a gun loaded with 200 bullets.
public class Level1 extends Gun {
	void bullets()
	{
		System.out.println("Gun is Loaded with 200 bullets");
	}
}
<file_sep>/junit/src/test/java/com/covalense/programtest/junit/StringOppMultiValueTest.java
package com.covalense.programtest.junit;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.covalense.junit.StringOpp;
@RunWith(Parameterized.class)
// Parameterized JUnit 4 test: each (input, expected length) pair runs the
// test method once against StringOpp.countCharInString.
public class StringOppMultiValueTest {
	private String name;
	private int value;
	private StringOpp strOpp = new StringOpp();

	public StringOppMultiValueTest(String name, int value) {
		this.name = name;
		this.value = value;
	}

	// Supplies the (input, expected) pairs to the Parameterized runner.
	// NOTE(review): "ta<NAME>" looks like anonymization residue — its length
	// is 8, not the expected 9, so that case would fail as written.
	@Parameters
	public static Collection<Object[]> getParis() {
		Object[][] obj = { { "apple", 5 }, { "sonu", 4 }, { "ta<NAME>", 9 } };
		return Arrays.asList(obj);
	}

	@Test
	public void testcountCharInString() {
		int result = strOpp.countCharInString(name);
		assertEquals(value, result);
	}
}
<file_sep>/junit/src/test/java/com/covalense/programtest/junit/MathOppTest.java
package com.covalense.programtest.junit;
import static org.junit.Assert.*;
import org.junit.Test;
import com.covalense.junit.MathOpp;
// JUnit 4 tests for MathOpp: addition, division, and division by zero.
public class MathOppTest {
	private MathOpp m = new MathOpp();

	@Test
	public void testAddMethod() {
		assertEquals(15, m.add(10, 5));
	}

	@Test
	public void testDivMethod() {
		assertEquals(5, m.div(25, 5));
	}

	// Passes only when div(25, 0) throws ArithmeticException.
	@Test(expected = ArithmeticException.class)
	public void testDivForException() {
		m.div(25,0);
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/Pencil.java
package com.covalense.corejavaapp.abstraction;
// Abstract base: draw() is shared; write() and color() must be implemented
// by concrete subclasses.
public abstract class Pencil {
	void draw() {
		System.out.println("DRAW");
	}
	abstract void write();
	abstract void color();
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/jodadatetime/LocalDateTest.java
package com.covalense.corejavaapp.jodadatetime;
import java.time.LocalDate;
import lombok.extern.java.Log;
@Log
// Demo of java.time.LocalDate accessors and comparison.
public class LocalDateTest {
	public static void main(String[] args) {
		LocalDate a = LocalDate.now();
		LocalDate a1 = LocalDate.now();
		log.info("" + a);
		log.info("" + a.getDayOfMonth());
		log.info("" + a.getDayOfYear());
		log.info("" + a.getMonthValue());
		log.info("" + a.getYear());
		log.info("" + a.getChronology());
		log.info("" + a.getClass());
		log.info("" + a.getDayOfWeek());
		log.info("" + a.getEra());
		log.info("" + a.getMonth());
		log.info("" + a.lengthOfMonth());
		// 0 whenever both now() calls land on the same calendar day.
		log.info("" + a.compareTo(a1));
	}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/map/TestHashMapValue.java
package com.covalense.corejavaapp.map;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
public class TestHashMapValue {
    /** Prints every value of a HashMap by iterating its value view. */
    public static void main(String[] args) {
        HashMap<String, Integer> numbers = new HashMap<String, Integer>();
        numbers.put("One", 1);
        numbers.put("Two", 2);
        numbers.put("Three", 3);
        numbers.put("Four", 4);
        Collection<Integer> view = numbers.values();
        // for-each drives the same Iterator the original used explicitly,
        // so traversal order is unchanged.
        for (Integer value : view) {
            System.out.println(value);
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/map/TestHashMapKey.java
package com.covalense.corejavaapp.map;
import java.util.HashMap;
import java.util.Set;
public class TestHashMapKey {
    /** Prints every key of a HashMap via its key-set view. */
    public static void main(String[] args) {
        HashMap<String, Integer> numbers = new HashMap<String, Integer>();
        numbers.put("One", 1);
        numbers.put("Two", 2);
        numbers.put("Three", 3);
        numbers.put("Four", 4);
        Set<String> keys = numbers.keySet();
        for (String key : keys) {
            System.out.println(key);
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestGenericArrayListC.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
import java.util.ListIterator;
public class TestGenericArrayListC {
    /** Walks an ArrayList forward and then backward with one ListIterator. */
    public static void main(String[] args) {
        ArrayList<Double> values = new ArrayList<Double>();
        values.add(9.2);
        values.add(2.0);
        values.add(23.0);
        values.add(25.6);

        System.out.println("********Forward");
        ListIterator<Double> cursor = values.listIterator();
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }

        // The cursor is now positioned past the end, so the same iterator
        // can be walked back to the start.
        System.out.println("********BackWard");
        while (cursor.hasPrevious()) {
            System.out.println(cursor.previous());
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/TestAa.java
package com.covalense.corejavaapp.javabean;
public class TestAa {
    /** Builds a Person bean and hands it to two receivers. */
    public static void main(String[] args) {
        Person person = new Person();
        person.setName("Dinga");
        person.setAge(25);
        DB1 firstDb = new DB1();
        DB2 secondDb = new DB2();
        firstDb.receive(person);
        secondDb.receive(person);
    }
}
<file_sep>/Corejava Assignment/Assignment3/src/TestCar.java
public class TestCar {
    public static void main(String[] args) {
        // NOTE(review): c is never used after construction — remove if Car's
        // constructor has no side effects (confirm against Car).
        Car c=new Car();
        Driver d=new Driver();
        Audi a=new Audi();
        // Driver receives the Audi through its supertype-typed parameter.
        d.driving(a);
    }
}
<file_sep>/AssesmentProgram/src/com/covalense/corejava/assesmenttest/ques2/SimpleInterestTest.java
package com.covalense.corejava.assesmenttest.ques2;
import lombok.extern.java.Log;
/**
*
* WAP to calculate simple interest using lambda expression
*
*/
@Log
public class SimpleInterestTest {
    /** Computes simple interest through a lambda-backed functional interface. */
    public static void main(String[] args) {
        // principal * time * rate / 100 — identical formula, renamed parameters.
        SimpleInterestCalculate calculator = (principal, time, rate) -> principal * time * rate / 100.0;
        double interest = calculator.simpleInterest(100, 5, 6.6);
        log.info("Simple Interest is " + interest);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/TestArrayListCar.java
package com.covalense.corejavaapp.collection;
import java.util.ArrayList;
import java.util.Collections;
public class TestArrayListCar {
    /** Builds four Car objects, sorts them (Car's natural order), prints each. */
    public static void main(String[] args) {
        ArrayList<Car> cars = new ArrayList<Car>();

        Car first = new Car();
        first.name = "Verna";
        first.brand = "Honda";
        first.cost = 1000000;
        first.rating = 6.5;

        Car second = new Car();
        second.name = "A4";
        second.brand = "Audi";
        second.cost = 2000000;
        second.rating = 6.5;

        Car third = new Car();
        third.name = "Eon";
        third.brand = "Honda";
        third.cost = 50000000;
        third.rating = 6.5;

        Car fourth = new Car();
        fourth.name = "B1";
        fourth.brand = "Benz";
        fourth.cost = 600000000;
        fourth.rating = 6.5;

        cars.add(first);
        cars.add(second);
        cars.add(third);
        cars.add(fourth);

        // Relies on Car implementing Comparable.
        Collections.sort(cars);
        for (Car car : cars) {
            System.out.println("Name is " + car.name);
            System.out.println("Brand is " + car.brand);
            System.out.println("Rating is " + car.rating);
            System.out.println("cost is " + car.cost);
            System.out.println("****************");
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/TestChip.java
package com.covalense.corejavaapp.passobject;
public class TestChip {
    public static void main(String[] args) {
        Baby b=new Baby();
        Lays l=new Lays();
        // NOTE(review): k is never passed to take() — remove if Kurkure's
        // constructor has no side effects.
        Kurkure k =new Kurkure();
        b.take(l);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expection/Paytm.java
package com.covalense.corejavaapp.expection;
public class Paytm {
    // Books a ticket through IRCTC, then demonstrates try/catch/finally:
    // the ArithmeticException from 10 / 0 is caught locally and the finally
    // block always prints the closing message.
    void getTicket() {
        System.out.println("Ticket Started");
        IRCTC i = new IRCTC();
        i.bookTicket();
        try {
            System.out.println(10 / 0);
        } catch (ArithmeticException e) {
            System.out.println("Exception Occur in paytm");
        } finally {
            System.out.println("Ticket ended");
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/threads/MarkerTest.java
package com.covalense.corejavaapp.threads;
public class MarkerTest {
    /** Starts two threads that share a single Marker runnable. */
    public static void main(String[] args) {
        Marker shared = new Marker();
        Thread first = new Thread(shared);
        Thread second = new Thread(shared);
        first.start();
        second.start();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructorrefernce/PenTest.java
package com.covalense.corejavaapp.constructorrefernce;
public class PenTest {
    /** Obtains a Pen through a constructor reference bound to PenFactory. */
    public static void main(String[] args) {
        PenFactory factory = Pen::new;
        Pen pen = factory.getPen();
        pen.write();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/DB1.java
package com.covalense.corejavaapp.javabean;
public class DB1 {
    // Prints the received Person bean's fields; simulates persisting to a DB.
    void receive(Person p) {
        System.out.println("**********DB1*************");
        System.out.println(p.getName());
        System.out.println(p.getAge());
    }
}
<file_sep>/AssesmentProgram/src/com/covalense/corejava/assesmenttest/ques4/CustomExceptionTest.java
package com.covalense.corejava.assesmenttest.ques4;
import lombok.extern.java.Log;
/**
*
* WAP to create a custom exception to validate student eligibility for the
* exam.
*
*/
@Log
public class CustomExceptionTest {
    public static void main(String[] args) {
        ValidateException validateException = new ValidateException();
        try {
            // validate(30) is expected to throw when the student is
            // ineligible; the message is logged below.
            validateException.validate(30);
        } catch (Exception e) {
            log.info(e.getMessage() + "");
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expressionapp/TestString.java
package com.covalense.corejavaapp.expressionapp;
// Functional interface: maps a String to its character count.
interface LengthCheck {
    int StringLength(String st);
}
public class TestString {
    /** Measures a string's length through the LengthCheck functional interface. */
    public static void main(String[] args) {
        // Method reference is equivalent to the original st -> st.length() lambda.
        LengthCheck checker = String::length;
        int length = checker.StringLength("Tajinder");
        System.out.println("Length of String is " + length);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/basicProgram/Even.java
package com.covalense.corejavaapp.basicprogram;
public class Even
{
    /** Classifies a hard-coded value as even or odd and prints the verdict. */
    public static void main(String[] args)
    {
        int n = 5;
        // Guard on the odd case first; the printed output is identical.
        if (n % 2 != 0)
        {
            System.out.println("Number is odd "+n);
        }
        else
        {
            System.out.println("Number is even "+n);
        }
    }
}
public class OddNum {
    /** Prints every number in 1..100 that is odd and divisible by 7. */
    public static void main(String[] args) {
        for (int i = 1; i <= 100; i++) {
            // Single combined condition replaces the nested ifs.
            if (i % 2 != 0 && i % 7 == 0) {
                System.out.println("Odd Number That are divisible by " + i);
            }
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/threads/Pencil1.java
package com.covalense.corejavaapp.threads;
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/collectionapp/assignment/EmployeeSort.java
package com.covalense.collectionapp.assignment;
// Plain data holder for employee records; package-private fields,
// presumably sorted elsewhere in the assignment.
public class EmployeeSort {
    String name;
    int id;
    double salary;
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/Google.java
package com.covalense.corejavaapp.abstraction;
// Abstract Google service: login() is concrete, document sharing is abstract.
public abstract class Google {
    void login() {
        System.out.println("Login Successfully");
    }

    // NOTE(review): "shareDocumen" looks like a typo for shareDocument, but
    // renaming would break existing subclasses — left as-is.
    abstract void shareDocumen();
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/Test.java
package com.covalense.corejavaapp;
// Empty placeholder class; no behavior.
public class Test {
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/interfaceTopic/Human.java
package com.covalense.corejavaapp.interfacetopic;
// Capability interface: anything human can walk.
public interface Human {
    void walk();
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/logger/TestRegister.java
package com.covalense.corejavaapp.logger;
import java.io.IOException;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
// Configures java.util.logging with a SEVERE-only file handler, emits one
// record per level, then drives the Register workflow.
public class TestRegister {
    private static final Logger LOGGER = Logger.getLogger("Hello");

    public static void main(String[] args) {
        // Drop default console handlers so only our file handler receives records.
        LogManager.getLogManager().reset();
        LOGGER.setLevel(Level.ALL);
        try {
            // Append mode keeps records from previous runs.
            FileHandler fh = new FileHandler("NewLogCreate.log", true);
            // Only SEVERE records reach the file despite the logger accepting ALL.
            fh.setLevel(Level.SEVERE);
            fh.setFormatter(new SimpleFormatter());
            LOGGER.addHandler(fh);
            LOGGER.log(Level.SEVERE, "Hi i am SEVERE Main Method");
            LOGGER.log(Level.WARNING, "Hi i am WARNING Main Method");
            LOGGER.log(Level.CONFIG, "Hi i am CONFIG Main Method");
            LOGGER.log(Level.INFO, "Hi i am INFO Main Method");
            LOGGER.log(Level.FINE, "Hi i am FINE Main Method");
            LOGGER.log(Level.FINER, "Hi i am Main Method");
            LOGGER.log(Level.FINEST, "Hi i am Main Method");
            Register r = new Register();
            r.connectDB();
            r.store();
        } catch (SecurityException | IOException e) {
            // Multi-catch replaces two byte-identical catch blocks.
            e.printStackTrace();
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructorrefernce/BoyTest.java
package com.covalense.corejavaapp.constructorrefernce;
import lombok.extern.java.Log;
@Log
public class BoyTest {
    /** Builds a Boy via a constructor reference and logs its fields. */
    public static void main(String[] args) {
        MyBoy factory = Boy::new;
        Boy boy = factory.getboy("Sonu", 1.74, 22);
        log.info("Name is " + boy.name);
        log.info("Height is " + boy.height);
        log.info("Age is " + boy.age);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/TestDrink.java
package com.covalense.corejavaapp.passobject;
public class TestDrink {
    /** Passes a WaterBottle to DrinkerWater.drink(). */
    public static void main(String[] args) {
        WaterBottle bottle = new WaterBottle();
        DrinkerWater drinker = new DrinkerWater();
        drinker.drink(bottle);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expection/TestChecked.java
package com.covalense.corejavaapp.expection;
import java.io.File;
import java.io.IOException;
public class TestChecked {
    /** Shows that File.createNewFile() forces handling of a checked IOException. */
    public static void main(String[] args) {
        File file = new File("Priya.txt");
        try {
            file.createNewFile();
        } catch (IOException e) {
            System.out.println("Checked exception");
        }
    }
}
<file_sep>/Corejava Assignment/Assignment2/src/EvenNum.java
public class EvenNum {
    /** Prints every number in 1..100 that is even and divisible by 5. */
    public static void main(String[] args) {
        final int limit = 100;
        // Single combined condition replaces the nested ifs; output unchanged.
        for (int i = 1; i <= limit; i++) {
            if (i % 2 == 0 && i % 5 == 0) {
                System.out.println("Even Number That are divisible by " + i);
            }
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructor/Student.java
package com.covalense.corejavaapp.constructor;
// Student record initialized fully through its constructor.
public class Student {
    String name;
    int age;
    double percentage;

    // All-args constructor; 'this.' disambiguates fields from parameters.
    public Student(String name,int age,double percentage)
    {
        this.name=name;
        this.age=age;
        this.percentage=percentage;
    }

    // Prints all fields. NOTE(review): conventionally 'display' (lowercase),
    // but renaming would break existing callers.
    void Display()
    {
        System.out.println("Name is "+name);
        System.out.println("Age is "+age);
        System.out.println("Percentage is "+percentage);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/array/DoubleArray.java
package com.covalense.corejavaapp.array;
public class DoubleArray {
    /** Fills a double array and prints every element. */
    public static void main(String[] args) {
        double[] a = new double[4];
        a[0] = 2.0;
        a[1] = 28.9;
        a[2] = 15.5;
        a[3] = 90.6;
        // Use a.length instead of the magic constant 4 so the loop stays
        // correct if the array size changes.
        for (int i = 0; i < a.length; i++) {
            System.out.println(a[i]);
        }
    }
}
<file_sep>/Corejava Assignment/Assignment3/src/SearchEmployee.java
// Overloading demo: search() resolved by parameter type.
public class SearchEmployee {
    // Lookup by employee name.
    void search(String name)
    {
        System.out.println("Search employee by name");
    }

    // Lookup by employee id.
    void search(int id)
    {
        System.out.println("Search employee by id");
    }
}
<file_sep>/Corejava Assignment/Assignment4/src/com/covalense/stringapp/assigment/ReverseChar.java
package com.covalense.stringapp.assigment;
import java.util.logging.Logger;
// Reverses each word of a fixed sentence while keeping word order
// ("how are you doing" -> "woh era uoy gniod ").
public class ReverseChar {
    private final static Logger loger = Logger.getLogger("Output");

    public static void main(String[] args) {
        String input = "how are you doing";
        char ch[] = input.toCharArray();
        String newString = "";
        for (int i = 0; i < ch.length; i++) {
            // k marks the start of the current word; advance i to its end.
            int k = i;
            while (i < ch.length && ch[i] != ' ') {
                i++;
            }
            // Copy the word backwards, from its last character down to k.
            int j = i - 1;
            while (j >= k) {
                newString = newString + ch[j];
                j--;
            }
            // Re-insert the separator; this also appends a trailing space
            // after the final word.
            newString = newString + " ";
        }
        loger.info(newString);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/foreachloop/TestArrayList.java
package com.covalense.corejavaapp.collection.foreachloop;
import java.util.ArrayList;
public class TestArrayList {
    /** Stores heterogeneous values in one list and prints each element. */
    public static void main(String[] args) {
        // ArrayList<Object> instead of the raw type: identical behavior,
        // but no "raw type" compiler warning; dead commented code removed.
        ArrayList<Object> al = new ArrayList<Object>();
        al.add(15);
        al.add(9.2);
        al.add('A');
        al.add("Riya");
        al.add(105);
        al.add(91.2);
        for (int i = 0; i < al.size(); i++) {
            Object r = al.get(i);
            System.out.println(r);
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructorrefernce/Pen.java
package com.covalense.corejavaapp.constructorrefernce;
import lombok.extern.java.Log;
@Log
public class Pen {
    // No-arg constructor; logs so constructor-reference demos show invocation.
    Pen() {
        log.info("Zero Parameterized constructor");
    }

    void write() {
        log.info("Pen write() method is call");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructor/Aa.java
package com.covalense.corejavaapp.constructor;
public class Aa {
    // Parameterized constructor; prints its argument to show each instantiation.
    Aa(int a)
    {
        System.out.println("Constructor called "+a);
    }
}
<file_sep>/Corejava Assignment/Assignment3/src/ChocolateIceCream.java
// Concrete IceCream flavor; presumably specializes IceCream.eat() — confirm
// against the base class.
public class ChocolateIceCream extends IceCream {
    void eat() {
        System.out.println("Chocolate IceCream for baby ");
    }
}
<file_sep>/HTML/grouping.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Grouping Concept</title>
</head>
<body>
<div>
        <h1>Welcome to the ELF Program</h1>
<p>Lorem ipsum dolor sit amet consectetur adipisicing elit. Rem quaerat, pariatur nobis
perspiciatis, voluptates est eos molestiae quam, illum in ex dolor!</p>
</div>
<div>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit
Error et optio praesentium exercitationem itaque Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit, amet consectetur adipisicing elit. Eos omnis amet similique tempora nihil? Ipsam, blanditiis! Repellendus, amet est. Dolore eveniet error possimus quibusdam in ea omnis voluptatum quas cupiditate, libero expedita porro modi optio consectetur fugit pariatur deserunt aperiam earum corporis aliquam, unde blanditiis obcaecati? Quos nisi tempore temporibus.
Omnis beatae quidem qui labore iure nulla dolor temporibus laborum dolore quas. obcaecati accusamus vel nostrum iure tempora.</p>
</div>
<span>
<button type="submit" >Submit</button>
</span>
<span>
<button type="reset">Reset</button>
</span>
<span>
<button disabled="disabled">Disabled</button>
</span>
</body>
</html><file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/TestATM.java
package com.covalense.corejavaapp.abstraction;
public class TestATM {
    /** Feeds three ATMCard implementations through the same Machine slot. */
    public static void main(String[] args) {
        Machine machine = new Machine();
        ICICI icici = new ICICI();
        HDFC hdfc = new HDFC();
        SBI sbi = new SBI();
        machine.slot(icici);
        System.out.println("****************************");
        machine.slot(hdfc);
        System.out.println("****************************");
        machine.slot(sbi);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/SBI.java
package com.covalense.corejavaapp.abstraction;
// SBI's implementation of the ATMCard contract.
public class SBI implements ATMCard {
    public void validate() {
        System.out.println("Card is Validate by SBI");
    }

    public void info() {
        System.out.println("Information Check by SBI ");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/implementlambda/GenderTest.java
package com.covalense.corejavaapp.implementlambda;
import lombok.extern.java.Log;
@Log
public class GenderTest {
    public static void main(String[] args) {
        // Enum constants print as their names; ordinal() is the declaration index.
        log.info(Gender.MALE + "");
        log.info(Gender.FEMALE.ordinal() + "");
        Gender g = Gender.FEMALE;
        // Switching on an enum uses bare constant names in the case labels.
        switch (g) {
        case MALE:
            log.info("Yes Its male");
            break;
        case FEMALE:
            log.info("Yes Its female");
            break;
        case OTHER:
            log.info("Yes Its cross gender");
            break;
        default:
            break;
        }
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/expressionapp/StudentTest.java
package com.covalense.corejavaapp.expressionapp;
import java.util.function.Consumer;
import lombok.extern.java.Log;
@Log
public class StudentTest {
    /** Computes a student's average mark via a Consumer lambda. */
    public static void main(String[] args) {
        Student student = new Student("Sonu", 24, 20, 19);
        // Same computation: integer marks summed, divided by 3.0.
        Consumer<Student> averager = st -> {
            double avg = (st.m1 + st.m2 + st.m3) / 3.0;
            log.info(st.name + " Average is " + avg);
        };
        averager.accept(student);
    }
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/mapapp/assignment/Student.java
package com.covalense.mapapp.assignment;
// Simple student value holder with a generated-style toString().
public class Student {
    String name;
    int age;
    double percentage;

    @Override
    public String toString() {
        return "Student [name=" + name + ", age=" + age + ", percentage=" + percentage + "]";
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/colon/methodreferenceapp/MyMath.java
package com.covalense.corejavaapp.colon.methodreferenceapp;
// Functional interface for two-int addition; implemented via method
// references elsewhere in this package.
public interface MyMath {
    int add(int a, int b);
}
<file_sep>/Corejava Assignment/Assignment3/src/TestGift.java
public class TestGift {
    public static void main(String[] args) {
        // NOTE(review): g is never used — remove if Gift's constructor has no
        // side effects.
        Gift g=new Gift();
        Receiver r=new Receiver();
        MoblieGift mg=new MoblieGift();
        // Receiver accepts the subclass through its Gift-typed parameter.
        r.receive(mg);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/immutableClass/TestPerson1.java
package com.covalense.corejavaapp.immutableclass;
public class TestPerson1 {
    /** Immutable-class demo: Person state is readable only through getters. */
    public static void main(String[] args) {
        Person pooja = new Person("Pooja", 22);
        System.out.println("Name is " + pooja.getName());
        System.out.println("Age is " + pooja.getAge());

        Person trisha = new Person("Trisha", 18);
        System.out.println("Name is " + trisha.getName());
        System.out.println("Age is " + trisha.getAge());
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/overLoading/TestTrain.java
package com.covalense.corejavaapp.overloading;
public class TestTrain {
    /** Exercises all three overloads of Train.search(). */
    public static void main(String[] args)
    {
        Train train = new Train();
        train.search(123456);
        train.search("Any Express");
        train.search("Express", 654321);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/interfaceTopic/Printer.java
package com.covalense.corejavaapp.interfacetopic;
// Capability interface: anything printable exposes print().
public interface Printer {
    void print();
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/hasRelationShip/Person.java
package com.covalense.corejavaapp.hasrelationship;
// Demonstrates a HAS-A relationship: every Person owns a Mobile.
public class Person {
    int i=5;
    Mobile m=new Mobile();

    void walk() {
        System.out.println("Walking");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/passObject/DrinkerWater.java
package com.covalense.corejavaapp.passobject;
public class DrinkerWater {
    // Receives a WaterBottle and opens it; object-passing demo.
    void drink(WaterBottle wb)
    {
        wb.open();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/abstraction/TestHonda.java
package com.covalense.corejavaapp.abstraction;
public class TestHonda {
    /** Polymorphism demo: a Hero instance used through its Honda supertype. */
    public static void main(String[] args) {
        Honda bike = new Hero();
        bike.engine();
        bike.desgin();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/colon/methodreferenceapp/MyFactorial.java
package com.covalense.corejavaapp.colon.methodreferenceapp;
// Functional interface mapping an int to its factorial; implemented via
// method references elsewhere in this package.
public interface MyFactorial {
    int getFact(int a);
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/inheritance/TestYantra.java
package com.covalense.corejavaapp.inheritance;
// Empty placeholder for the inheritance exercise; no behavior yet.
public class TestYantra {
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/interfaceTopic/Person.java
package com.covalense.corejavaapp.interfacetopic;
public class Person implements Animal,Human {
public void walk() {
System.out.println("Walk method");
}
public void eat() {
System.out.println("Eat method");
}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/casting/TestD.java
package com.covalense.corejavaapp.casting;
public class TestD {
    public static void main(String[] args) {
        // Upcast, then explicit downcast back to Cow.
        Animal a = new Cow();
        Cow c = (Cow) a;
        // Note: these println calls print the value being ASSIGNED (5 and 6) —
        // a side effect of embedding assignments in the argument.
        System.out.println(c.i = 5);
        c.eat();
        System.out.println(c.x = 6);
        c.run();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/basicProgram/Discount.java
package com.covalense.corejavaapp.basicprogram;
public class Discount
{
    /** Applies a 25% first-purchase discount to a fixed price and prints it. */
    public static void main(String[] args)
    {
        double basePrice = 1500;
        double discountPercent = 25;
        int purchaseCount = 1;
        // Discount applies only to the first purchase; identical arithmetic.
        if (purchaseCount == 1)
        {
            basePrice = basePrice - (basePrice * discountPercent) / 100;
            System.out.println(basePrice);
        }
        System.out.println(basePrice + " acutal price is you have pay in paytm");
    }
}
<file_sep>/Corejava Assignment/Assignment6/src/com/covalense/collectionapp/assignment/CountObject.java
package com.covalense.collectionapp.assignment;
import lombok.extern.java.Log;
@Log
public class CountObject {
    // Shared across all instances: incremented once per construction.
    static int i;

    CountObject() {
        i++;
    }

    // Logs how many CountObject instances have been created so far.
    void display() {
        log.info(i + "");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/array/StringArray.java
package com.covalense.corejavaapp.array;
public class StringArray {
    /** Fills a String array, prints each element, then prints the length. */
    public static void main(String[] args) {
        String[] s = new String[4];
        s[0] = "Sonu";
        s[1] = "Dikshith";
        s[2] = "Trisha";
        s[3] = "Tandeep";
        // Bug fix: the original loop used i <= s.length, which reads s[4]
        // and throws ArrayIndexOutOfBoundsException after the last element.
        for (int i = 0; i < s.length; i++) {
            System.out.println(s[i]);
        }
        System.out.println(s.length);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/anotation/Cow.java
package com.covalense.corejavaapp.anotation;
// Concrete Animal; @Override confirms eat() replaces the base implementation.
public class Cow extends Animal {
    @Override
    void eat() {
        System.out.println("Cow eat() method is call");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/overLoading/Train.java
package com.covalense.corejavaapp.overloading;
// Demonstrates method overloading: same name, different parameter lists.
public class Train {
    // Lookup by train number.
    void search(int i)
    {
        System.out.println("Search With Train number ");
    }

    // Lookup by train name.
    void search(String s)
    {
        System.out.println("Search With Train Name ");
    }

    // Lookup by both name and number.
    void search(String s,int i)
    {
        System.out.println("Search With Train name&number ");
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/constructor/TestAa.java
package com.covalense.corejavaapp.constructor;
public class TestAa {
    public static void main(String[] args) {
        // Each 'new' prints from Aa's constructor; references are
        // deliberately not kept.
        new Aa(5);
        new Aa(6);
        new Aa(7);
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/collection/TestArrayList.java
package com.covalense.corejavaapp.collection;
import java.util.ArrayList;
public class TestArrayList {
public static void main(String[] args) {
ArrayList<Students> al = new ArrayList<Students>();
Students s1 = new Students();
s1.name = "Divya";
s1.id = 1;
s1.percentage = 78.98;
Students s2 = new Students();
s2.name = "Trsiha";
s2.id = 2;
s2.percentage = 81.98;
Students s3 = new Students();
s3.name = "Priya";
s3.id = 3;
s3.percentage = 95.98;
Students s4 = new Students();
s4.name = "Riya";
s4.id = 4;
s4.percentage = 54.98;
al.add(s1);
al.add(s2);
al.add(s3);
al.add(s4);
for (int i = 0; i < al.size(); i++) {
Students s = al.get(i);
System.out.println("name is " + s.name);
System.out.println("Id is " + s.id);
System.out.println("percentage is" + s.percentage);
System.out.println("**********************");
}
}
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/casting/TestP.java
package com.covalense.corejavaapp.casting;
public class TestP {
    public static void main(String[] args) {
        // Upcast then downcast back to Marker to reach subclass members.
        Pen p = new Marker();
        Marker m = (Marker) p;
        // These println calls print the values being ASSIGNED (20 and 2.0).
        System.out.println(m.cost = 20);
        m.write();
        System.out.println(m.size = 2.0);
        m.color();
    }
}
<file_sep>/CoreJava/src/com/covalense/coreJavaApp/javabean/EmployeeBean.java
package com.covalense.corejavaapp.javabean;
import java.sql.Date;
// JavaBean for employee records: private fields, a no-arg constructor, an
// all-args constructor, and conventional getters/setters.
// NOTE(review): joiningDate uses java.sql.Date per the file's import —
// confirm java.util.Date was not intended.
public class EmployeeBean {
    private String name;
    private int id;
    private String dept;
    private int salary;
    private int age;
    private Date joiningDate;
    private String gender;

    // No-arg constructor required by the bean convention.
    public EmployeeBean() {
    }

    // Convenience all-args constructor.
    public EmployeeBean(String name, int id, String dept, int salary, int age, Date joiningDate, String gender) {
        this.name = name;
        this.id = id;
        this.dept = dept;
        this.salary = salary;
        this.age = age;
        this.joiningDate = joiningDate;
        this.gender = gender;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getDept() {
        return dept;
    }

    public void setDept(String dept) {
        this.dept = dept;
    }

    public int getSalary() {
        return salary;
    }

    public void setSalary(int salary) {
        this.salary = salary;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public Date getJoiningDate() {
        return joiningDate;
    }

    public void setJoiningDate(Date joiningDate) {
        this.joiningDate = joiningDate;
    }

    public String getGender() {
        return gender;
    }

    public void setGender(String gender) {
        this.gender = gender;
    }
}
<file_sep>/Corejava Assignment/Assignment3/src/Level1C.java
public class Level1C extends CarFuel {
    // NOTE(review): conventionally 'fuel' (lowercase), but the name may
    // override/shadow a CarFuel method — renaming could break dispatch.
    void Fuel()
    {
        System.out.println("Car with 200L fuel");
    }
}
|
103302d6344baca20266321352e60617f32602d9
|
[
"Java",
"HTML"
] | 127 |
Java
|
TajinderSingh29/ELF-06june19-Covalense-TajinderSandhu
|
00e846873a6ab567c19ed60e3c7b00ba6e3cb054
|
81a9e0b5421ac483ef15effaed384ada141102c4
|
refs/heads/master
|
<repo_name>47pik/quantathon<file_sep>/p4.py
import output as op
import observations as ob
from metrics import *
if __name__ == '__main__':
    # Build the Part-4 time series from the RP4/W4 strategy definitions.
    ts_ret = ob.ts_return(RP4)
    ts_cum_ret = ob.ts_cum_return(RP4)
    ts_mean_abs_w = ob.ts_mean_abs_weight(W4, fill_fn= FILL4)
    ts_port_dir = ob.ts_portfolio_dir(W4, fill_fn= FILL4)
    # Print the headline deliverable numbers.
    op.print_deliverables(ts_ret, ts_cum_ret, AvrROC)
    # Plot the four portfolio-dynamics panels for days [2, rd.T).
    op.plot_portfolio_dynamics(2, rd.T, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir)
    # Assemble the per-day/per-stock matrix for CSV export.
    data_matrix = op.generate_data_matrix(rd.T, rd.N, rd.stock_dict, \
        rd.date_dict, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir, W4)
    # Write the deliverable CSV.
    op.generate_csv_data('data_part4.team_B.csv', data_matrix)
import numpy as np
import scipy.optimize
from metrics import *
import random
if __name__ == '__main__':
    # Scratch optimizer: restart scipy's minimizer on sharpe3 from random
    # 3-coefficient starting points and record each trial's result.
    f = sharpe3
    sh_dic = {}
    #optimize for 3 weights
    for i in range(15):
        # Bug fix: the comprehension previously used 'i' as its variable,
        # which leaks in Python 2 and clobbers the loop index, so every
        # trial overwrote sh_dic[2].  Use a throwaway name instead.
        params = [random.uniform(-0.5, 0.5) for _ in range(3)]
        r = scipy.optimize.minimize(f, np.array(params),
                                    options={'maxiter': 100, 'disp': True})
        print(str(tuple(params)))
        print(r.x)
        print(r.fun)
        sh_dic[i] = [tuple(params), r.x, r.fun]
    #if there's a pattern, try a few of that pattern
    #for pw in range(-4, 0):
    #print(pw)
    #pattern is probably not this!!!!
    #b = (2 ** pw)
    #a = b * (-0.38)
    #c = b * (-0.39)
    #params = [random.uniform(-1, 1) for i in range(3)]
    #r = scipy.optimize.minimize(f, np.array(params),
    #options={'maxiter':100, 'disp':True})
    #print(str(tuple(params)))
    #print(r.x)
    #print(r.fun)
    #sh_dic[pw] = [tuple(params), r.x, r.fun]
    #explore deeper using your favourite 12 weight combo
    #optimal from part 2 (ish)
    #params = [-0.11812008, -3.05744312, 3.798748, -2.05990281, 0.39163579, 3.8577401,\
    #-4.32966829, -3.93373867, -0.46817134, -2.17563513, 2.48775916, 2.48504554]
    #r = scipy.optimize.minimize(f, np.array(params),\
    # options={'maxiter':1000, 'disp':True})
    #print(r.x)
    #print(r.fun)
    #print(str(tuple(params)))
import numpy as np
from metrics import *
def ts_return(r_fn, coeffs=None):
    '''Return list representing the time-series of the long-short return.

    Entries 0 and 1 are the sentinel value 99: returns are undefined before
    day 2.  r_fn is called as r_fn(t, coeffs) when coefficients are supplied.
    '''
    ret_pf = [99, 99]
    for t in range(2, rd.T):
        # Identity check (was a truthiness check) so a falsy-but-valid
        # coefficient container is still passed through.
        if coeffs is not None:
            rp = r_fn(t, coeffs)
        else:
            rp = r_fn(t)
        ret_pf.append(rp)
    return ret_pf
#check indicies
def ts_cum_return(r_fn, coeffs=None):
    '''Return list representing the time-series of cumulative long-short return'''
    # First two entries are the 99 sentinel, matching ts_return.
    cumR = [99, 99]
    ret_pf = ts_return(r_fn, coeffs)
    # Shift simple returns to gross returns (1 + r) for compounding.
    rp2 = [x + 1 for x in ret_pf]
    for t in range(2, rd.T):
        # Log of the compounded product over days 2..t inclusive.
        cumR.append(np.log(np.prod(rp2[2: t + 1])))
    return cumR
def ts_mean_abs_weight(w_fn, coeffs=None, fill_fn=None):
    '''Return list representing the time series of mean absolute weight.

    Entries 0 and 1 are the sentinel 99 (undefined before day 2).  When
    fill_fn is given, each weight is scaled by fill_fn(t, j, weight)
    before averaging.
    '''
    mean_abs_weight = [99, 99]
    # Identity check so falsy-but-valid coefficient containers still pass.
    extra_arg = [coeffs] if coeffs is not None else []
    for t in range(2, rd.T):
        weights = np.array([w_fn(*([t, j] + extra_arg)) for j in rd.stock_dict])
        if fill_fn:
            # One fill factor per stock, in rd.stock_dict iteration order
            # (replaces the original manual index counter).
            fills = [fill_fn(t, j, w) for j, w in zip(rd.stock_dict, weights)]
            weights = weights * np.array(fills)
        mean_abs_weight.append(sum(abs(w) for w in weights) / float(rd.N))
    return mean_abs_weight
def ts_portfolio_dir(w_fn, coeffs=None, fill_fn=None):
    '''Return list representing the time series of portfolio direction.

    Direction is sum(w) / sum(|w|) per day (0 when gross exposure is 0);
    entries 0 and 1 are the sentinel 99.
    '''
    port_dir = [99, 99]
    # Identity check so falsy-but-valid coefficient containers still pass.
    extra_arg = [coeffs] if coeffs is not None else []
    for t in range(2, rd.T):
        weights = np.array([w_fn(*([t, j] + extra_arg)) for j in rd.stock_dict])
        if fill_fn:
            # One fill factor per stock, in rd.stock_dict iteration order.
            fills = [fill_fn(t, j, w) for j, w in zip(rd.stock_dict, weights)]
            weights = weights * np.array(fills)
        # Sum |w| once and reuse it.  The original recomputed sum() over a
        # map() object, which is exhausted after one pass under Python 3
        # (the second sum would be 0).
        denom = sum(abs(w) for w in weights)
        if denom == 0:
            res = 0
        else:
            res = round(sum(weights), 15) / denom
        port_dir.append(res)
    return port_dir
##???
#eqLO = []
#rccs = [np.mean([RCC(t, j) for j in rd.stock_dict]) + 1 for t in range(rd.T)]
#for t in range(rd.T):
#eqLO.append(np.log(np.prod(rccs[2: t + 1])))<file_sep>/p3.py
import output as op
import observations as ob
from metrics import *
if __name__ == '__main__':
    #define params
    # Twelve strategy coefficients b1..b12 (three distinct values, four slots each).
    param_names = ['b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8', 'b9', 'b10', 'b11', 'b12']
    param_vals = [-1.25,-1.25,-1.25,-1.25,1.85,1.85,1.85,1.85,-0.11,-0.11,-0.11,-0.11]
    #generate time series
    ts_ret = ob.ts_return(RP3, coeffs=param_vals)
    ts_cum_ret = ob.ts_cum_return(RP3, coeffs=param_vals)
    ts_mean_abs_w = ob.ts_mean_abs_weight(W3_wrapper, coeffs=param_vals, fill_fn = FILL3)
    ts_port_dir = ob.ts_portfolio_dir(W3_wrapper, coeffs=param_vals, fill_fn= FILL3)
    #generate deliverable results
    op.print_deliverables(ts_ret, ts_cum_ret, AvrROC)
    #plot data
    op.plot_portfolio_dynamics(2, rd.T, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir)
    #generate data_matrix for output
    data_matrix = op.generate_data_matrix(rd.T, rd.N, rd.stock_dict, \
        rd.date_dict, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir, W3_wrapper, coeffs=param_vals)
    #output data to CSV
    op.generate_csv_data('data_part3.team_B.csv', data_matrix)
    #output coefficients to CSV
    op.generate_csv_coeff('coeff_part3.team_B.csv', param_names, param_vals)
import csv
import os
import numpy as np
N = 100
T = 1003
#access attribute i of stock s at column s * 6 + i
#Attributes:
#1: SO
#2: SH
#3: SL
#4: SC
#5: TVL
#6: IND
def get_data():
    '''Return data as numpy array and dictionary of numpy arrays.

    dat is a numpy array with 602 columns and 1003 rows; each row is a day.
    Column 0 is the date e.g. 20010601, column 601 is the day index
    (Jan 1, 2000 is 0, Dec 31 2003 is 1002).  The other 600 columns:
    columns 1-6 belong to stock 0, 7-12 to stock 1, ... 595-600 to stock 99,
    in the order SO (opening price), SH (intraday high), SL (intraday low),
    SC (closing price), TVL (trading volume), IND (trade direction).
    stock_dict maps 's0'..'s99' to per-stock tables whose FIRST column is
    the day index followed by the 6 attribute columns.
    date_dict maps the int day index to the raw date value.
    '''
    #os.chdir('C:\Users\47pik\Documents\GitHub\quantathon\')
    rdr = csv.reader(open(os.path.join('data', 'in_sample_data.csv'), 'r'))
    x = list(rdr)
    dat = np.array(x).astype('float')
    # Append the day-index column (0 .. T-1) as column 601.
    nums = np.zeros((T, 1))
    for i in range(T):
        nums[i, 0] = i
    dat = np.append(dat, nums, axis=1)
    stock_dict = {}
    for i in range(N):
        stock_name = 's' + str(i)
        # list(...) keeps the index-list concatenation working under
        # Python 3, where range() is no longer a list.
        stock_table = dat[:, [-1] + list(range(i * 6 + 1, i * 6 + 6 + 1))].copy()
        stock_dict[stock_name] = stock_table
    date_dict = {}
    for i in range(T):
        date_dict[i] = dat[i, 0]
    return dat, stock_dict, date_dict
dat, stock_dict, date_dict = get_data()<file_sep>/metrics.py
import numpy as np
import read_data as rd
import time
from collections import Counter
#from sklearn import linear_model as lm
import scipy.stats as ss
import sys
#Caches
# Module-level memoization dicts, keyed by day t or by (t, stock-id j).
# Each metric function checks its dict before recomputing.  Grouped by
# assignment part.
SOd = {}; SHd = {}; SLd = {}; SCd = {}; TVLd = {}; AvrTVLd = {}; INDd = {}
W1d = {}; AvrRCCd = {}; RCCd = {}; RP1d = {};
W2d = {}; RCOd = {}; ROCd = {}; ROOd = {}; RVPd = {}; RP2d = {}
AvrRCOd = {}; AvrROCd= {}; AvrROOd = {}; AvrRVPd = {}
W3d = {}; FILL3d = {}; RP3d = {}
W4d = {}; FILL4d = {}; RP4d = {}
ret10d = {}; lowd = {}; highd = {}; ressupd = {}; movavgd = {}
#Part 1
def AvrRCC(t):
'''Returns the equalweighted average close-to-close return across all stocks
on day t'''
if t in AvrRCCd: return AvrRCCd[t]
res = np.mean([RCC(t, j) for j in rd.stock_dict])
AvrRCCd[t] = res
return res
def RCC(t, j):
'''Returns close-to-close return of stock j on day t'''
if (t, j) in RCCd: return RCCd[(t,j)]
res =(SC(t,j)/SC(t-1, j)) - 1
RCCd[(t, j)] = res
return res
def W1(t, j):
    """Part 1 contrarian weight for stock j on day t: short yesterday's relative winners (memoized in W1d)."""
    key = (t, j)
    if key not in W1d:
        W1d[key] = - (1.0 / rd.N) * (RCC(t - 1, j) - AvrRCC(t - 1))
    return W1d[key]
def RP1(t):
    """Close-to-close portfolio return for day t, normalized by gross exposure (memoized in RP1d)."""
    if t not in RP1d:
        gross = sum(abs(W1(t, j)) for j in rd.stock_dict)
        pnl = sum(W1(t, j) * RCC(t, j) for j in rd.stock_dict)
        RP1d[t] = pnl / gross
    return RP1d[t]
#Part 2
def RCO(t, j):
    """Close(t-1)-to-open(t) return of stock j, memoized in RCOd."""
    key = (t, j)
    if key not in RCOd:
        RCOd[key] = (SO(t, j) / SC(t - 1, j)) - 1
    return RCOd[key]
def AvrRCO(t):
    """Average close-to-open return across all stocks on day t (memoized in AvrRCOd)."""
    if t not in AvrRCOd:
        AvrRCOd[t] = np.mean([RCO(t, j) for j in rd.stock_dict])
    return AvrRCOd[t]
def ROC(t, j):
    """Open-to-close (intraday) return of stock j on day t, memoized in ROCd."""
    key = (t, j)
    if key not in ROCd:
        ROCd[key] = (SC(t, j) / float(SO(t, j))) - 1
    return ROCd[key]
def AvrROC(t):
    """Average open-to-close return across all stocks on day t (memoized in AvrROCd)."""
    if t not in AvrROCd:
        AvrROCd[t] = np.mean([ROC(t, j) for j in rd.stock_dict])
    return AvrROCd[t]
def ROO(t, j):
    """Open-to-open return of stock j from day t-1 to day t, memoized in ROOd."""
    key = (t, j)
    if key not in ROOd:
        ROOd[key] = (SO(t, j) / float(SO(t - 1, j))) - 1
    return ROOd[key]
def AvrROO(t):
    """Average open-to-open return across all stocks on day t (memoized in AvrROOd)."""
    if t not in AvrROOd:
        AvrROOd[t] = np.mean([ROO(t, j) for j in rd.stock_dict])
    return AvrROOd[t]
def RVP(t, j):
    """Range-based variance proxy for stock j on day t (Parkinson-style: scaled squared log high/low spread), memoized in RVPd."""
    key = (t, j)
    if key not in RVPd:
        log_range = np.log(SH(t, j)) - np.log(SL(t, j))
        RVPd[key] = (1 / (float(4 * np.log(2)))) * (log_range ** 2)
    return RVPd[key]
def AvrRVP(t, j):
    '''Return the mean RVP of stock j over the (up to) 200 days ending at day t.

    Fix: the list comprehension previously reused ``t`` as its loop variable,
    shadowing the parameter (and, under Python 2 scoping, leaking back into
    the function); renamed to ``i``. The computed values are unchanged.
    '''
    if (t, j) in AvrRVPd: return AvrRVPd[(t, j)]
    res = np.mean([RVP(i, j) for i in range(max(1, t - 200), t + 1)])
    AvrRVPd[(t, j)] = res
    return res
def W2(t, j):
    """Twelve Part-2 signal terms for stock j on day t (memoized in W2d).

    Terms 0-3 are demeaned returns (lagged RCC, ROO, lagged ROC, RCO);
    terms 4-7 scale those by relative volume; terms 8-11 scale them by
    relative range-variance. Every term is divided by N.
    """
    if (t, j) in W2d:
        return W2d[(t, j)]
    n = float(rd.N)
    rel_tvl = TVL(t - 1, j) / float(AvrTVL(t - 1, j))
    rel_rvp = RVP(t - 1, j) / float(AvrRVP(t - 1, j))
    base = [RCC(t - 1, j) - AvrRCC(t - 1),
            ROO(t, j) - AvrROO(t),
            ROC(t - 1, j) - AvrROC(t - 1),
            RCO(t, j) - AvrRCO(t)]
    scaled = base + [rel_tvl * b for b in base] + [rel_rvp * b for b in base]
    terms = np.array([x / n for x in scaled])
    W2d[(t, j)] = terms
    return terms
def W2p(t, j):
    '''Reduced three-term Part 2 signal for stock j on day t.

    Averages the three demeaned returns (lagged RCC, ROO, lagged ROC) and
    scales the average by relative volume and relative range-variance;
    each term is divided by N.

    Fix: results were cached in W2d under the same (t, j) key used by the
    12-term W2(), so whichever function ran first poisoned the other's
    cache; entries are now stored under a distinguishing key.
    '''
    key = (t, j, 'p')
    if key in W2d: return W2d[key]
    n = float(rd.N)
    relative_tvl = TVL(t-1,j) / float(AvrTVL(t-1,j))
    relative_rvp = RVP(t-1,j) / float(AvrRVP(t-1,j))
    rcc = (RCC(t-1,j) - AvrRCC(t-1))
    roo = (ROO(t,j) - AvrROO(t))
    roc = (ROC(t-1,j) - AvrROC(t-1))
    r_avg = (rcc + roo + roc) / 3
    terms = np.array([x / n for x in [r_avg, relative_tvl * r_avg, relative_rvp * r_avg]])
    W2d[key] = terms
    return terms
def W2_wrapper(t, j, parameters):
    """Scalar Part-2 weight: dot product of the parameter vector with the 12 W2 signal terms."""
    return np.sum(parameters * W2(t, j))
def W2p_wrapper(t, j, parameters):
    """Scalar reduced Part-2 weight: dot product of the parameter vector with the 3 W2p signal terms."""
    return np.sum(parameters * W2p(t, j))
def RP2(t, parameters):
    '''Open-to-close portfolio return for day t under the given signal parameters.

    A length-12 parameter vector selects the full W2 signal set; any other
    length selects the reduced W2p set. (RP2d is not used here because the
    result depends on ``parameters``, not just t.)

    Fix: returns 0 when gross exposure is zero, consistent with RP3/RP4,
    instead of raising ZeroDivisionError.
    '''
    if len(parameters) == 12:
        w = [W2_wrapper(t, j, parameters) for j in rd.stock_dict]
    else:
        w = [W2p_wrapper(t, j, parameters) for j in rd.stock_dict]
    rocs = [ROC(t, j) for j in rd.stock_dict]
    denom = sum(map(abs, w))
    if denom == 0:
        return 0
    return np.dot(w, rocs) / denom
def sharpe2(parameters):
    """Negative Sharpe ratio of the RP2 series over days 2..T-1 (negated so minimizers maximize Sharpe)."""
    rps = [RP2(t, parameters) for t in range(2, rd.T)]
    return -np.mean(rps) / np.std(rps)
#Part 3
def W3(t, j):
    """Twelve Part-3 signal terms for stock j on day t (memoized in W3d).

    Identical construction to W2: demeaned returns (lagged RCC, ROO,
    lagged ROC, RCO), then the same four scaled by relative volume, then
    by relative range-variance; all divided by N.
    """
    if (t, j) in W3d:
        return W3d[(t, j)]
    n = float(rd.N)
    rel_tvl = TVL(t - 1, j) / float(AvrTVL(t - 1, j))
    rel_rvp = RVP(t - 1, j) / float(AvrRVP(t - 1, j))
    base = [RCC(t - 1, j) - AvrRCC(t - 1),
            ROO(t, j) - AvrROO(t),
            ROC(t - 1, j) - AvrROC(t - 1),
            RCO(t, j) - AvrRCO(t)]
    scaled = base + [rel_tvl * b for b in base] + [rel_rvp * b for b in base]
    terms = np.array([x / n for x in scaled])
    W3d[(t, j)] = terms
    return terms
def W3p(t, j):
    '''Reduced three-term Part 3 signal for stock j on day t.

    Averages the three demeaned returns (lagged RCC, ROO, lagged ROC) and
    scales the average by relative volume and relative range-variance;
    each term is divided by N.

    Fix: results were cached in W3d under the same (t, j) key used by the
    12-term W3(), so whichever function ran first poisoned the other's
    cache; entries are now stored under a distinguishing key.
    '''
    key = (t, j, 'p')
    if key in W3d: return W3d[key]
    n = float(rd.N)
    relative_tvl = TVL(t-1,j) / float(AvrTVL(t-1,j))
    relative_rvp = RVP(t-1,j) / float(AvrRVP(t-1,j))
    rcc = (RCC(t-1,j) - AvrRCC(t-1))
    roo = (ROO(t,j) - AvrROO(t))
    roc = (ROC(t-1,j) - AvrROC(t-1))
    r_avg = (rcc + roo + roc) / 3
    terms = np.array([x / n for x in [r_avg, relative_tvl * r_avg, relative_rvp * r_avg]])
    W3d[key] = terms
    return terms
def W3p_wrapper(t, j, parameters):
    """Scalar reduced Part-3 weight: dot product of the parameter vector with the 3 W3p signal terms."""
    return np.sum(parameters * W3p(t, j))
def FILL3(t, j, w):
    """Return 1 if an order of sign(w) in stock j can be filled on day t, else 0.

    An order fills when its sign does not oppose the day's trade direction
    indicator IND(t, j). Memoized in FILL3d keyed by (t, j, sign).
    """
    sgn = np.sign(w)
    key = (t, j, sgn)
    if key not in FILL3d:
        FILL3d[key] = 1 if (sgn * IND(t, j)) >= 0 else 0
    return FILL3d[key]
def W3_wrapper(t, j, parameters):
    """Scalar Part-3 weight: dot product of the parameter vector with the 12 W3 signal terms."""
    return np.sum(parameters * W3(t, j))
def RP3(t, parameters):
    '''Open-to-close portfolio return for day t, honoring fill conditions.

    A length-12 parameter vector selects the full W3 signal set; any other
    length selects the reduced W3p set. Unfillable orders (per FILL3) are
    zeroed out; returns 0 when the resulting gross exposure is zero.

    Fix: the precomputed ``denom`` is now reused in the division instead
    of recomputing sum(map(abs, term)).
    '''
    if len(parameters) == 12:
        w = [W3_wrapper(t, j, parameters) for j in rd.stock_dict]
    else:
        w = [W3p_wrapper(t, j, parameters) for j in rd.stock_dict]
    # Pair each stock with its weight (same dict, same iteration order).
    fills = [FILL3(t, j, wi) for j, wi in zip(rd.stock_dict, w)]
    rocs = [ROC(t, j) for j in rd.stock_dict]
    term = np.array(w) * np.array(fills)
    denom = sum(map(abs, term))
    if denom == 0:
        return 0
    return sum(term * rocs) / denom
def sharpe3(parameters):
    """Negative Sharpe ratio of the RP3 series over days 2..T-1 (negated so minimizers maximize Sharpe)."""
    rps = [RP3(t, parameters) for t in range(2, rd.T)]
    return -np.mean(rps) / np.std(rps)
#Part 4
def W4(t, j):
'''Returns weights for stock j on day t for Part 4'''
if (t, j) in W4d: return W4d[(t,j)]
#res = ret10(t, j)
#if TVL(t - 1, j) > AvrTVL(t - 1, j) * 1.5:
#res = -ret10(t - 1, j)
#else:
#res = 0
res = ressup(t, j)
W4d[(t,j)] = res
return res
def ret10(t, j):
    '''Cumulative return of stock j relative to the equal-weight average,
    compounded from day 1 through day t-2 (memoized in ret10d).

    Fix: the products previously started at i=0, and RCC(0, j) divides by
    SC(-1, j) -- negative indexing silently resolves that to the *last*
    row of the data, leaking future prices into every value. The
    compounding now starts at day 1.
    NOTE(review): the name suggests a 10-day window, but the window spans
    the whole sample up to t-2 -- confirm the intended horizon.
    '''
    if (t, j) in ret10d: return ret10d[(t,j)]
    res = np.prod([1 + RCC(i, j) for i in range(1, t-1)])/ \
          np.prod([1 + AvrRCC(i) for i in range(1, t-1)])- 1
    ret10d[(t,j)] = res
    return res
def ressup(t, j):
    '''Support/resistance signal for stock j on day t (memoized in ressupd).

    Compares the day's open against the trailing 20-day low/high band:
    returns +beta (buy) when the open is within alpha_low of the low,
    -beta (sell) when it is within alpha_high of the high, else 0.
    '''
    alpha_high = 0.01 #selling band: open within 1% of the trailing high
    alpha_low = 0.0615 #buying band: open within 6.15% of the trailing low
    beta = 1  # magnitude of the position taken
    if (t, j) in ressupd: return ressupd[(t, j)]
    L = low(t, j); H = high(t, j)
    pr = SO(t, j)
    if pr < ((1 + alpha_low) * L):
        res = beta
    elif pr > ((1 - alpha_high) * H):
        res = -beta
    else:
        res = 0
    ressupd[(t, j)] = res
    return res
def low(t, j):
    """Lowest intraday low of stock j over the 20 days ending at day t (memoized in lowd).

    NOTE: an earlier revision used a 200-day window here.
    """
    key = (t, j)
    if key not in lowd:
        lowd[key] = min([SL(i, j) for i in range(max(1, t - 20), t + 1)])
    return lowd[key]
def high(t, j):
    """Highest intraday high of stock j over the 20 days ending at day t (memoized in highd)."""
    key = (t, j)
    if key not in highd:
        highd[key] = max([SH(i, j) for i in range(max(1, t - 20), t + 1)])
    return highd[key]
def movavg(t, j):
    '''Moving-average trend signal for stock j (memoized in movavgd).

    NOTE(review): despite the name, this does not return the average
    itself -- it returns +/- yesterday's close (positive when the close
    is above its trailing 200-day mean close, negative otherwise).
    Confirm the intended semantics before reuse.
    '''
    if (t, j) in movavgd: return movavgd[(t, j)]
    longavg = np.mean([SC(i, j) for i in range(max(1, t-200), t+1)])
    if SC(t - 1, j) > longavg:
        res = SC(t - 1, j)
    else:
        res = -SC(t - 1, j)
    movavgd[(t, j)] = res
    return res
def FILL4(t, j, weights=None):
    """Return 1 if the W4 order for stock j can be filled on day t, else 0.

    An order fills when the W4 weight's sign does not oppose IND(t, j).
    ``weights`` is accepted for signature parity with FILL3 but unused.
    Memoized in FILL4d.
    """
    key = (t, j)
    if key not in FILL4d:
        FILL4d[key] = 1 if (W4(t, j) * IND(t, j)) >= 0 else 0
    return FILL4d[key]
def RP4(t):
    '''Open-to-close portfolio return for day t under the W4 strategy,
    honoring fill conditions; returns 0 when gross exposure is zero.

    Fix: the precomputed ``denom`` is now reused in the division instead
    of recomputing sum(map(abs, term)).
    '''
    w = [W4(t, j) for j in rd.stock_dict]
    fills = [FILL4(t, j) for j in rd.stock_dict]
    rocs = [ROC(t, j) for j in rd.stock_dict]
    term = np.array(w) * np.array(fills)
    denom = sum(map(abs, term))
    if denom == 0:
        return 0
    return sum(term * rocs) / denom
def sharpe_fast(rps):
    """Negative Sharpe ratio (mean over population std) of a precomputed return series."""
    mean_ret = np.mean(rps)
    vol = np.std(rps)
    return -(mean_ret / vol)
#Utility
def SO(t, j):
    """Opening price of stock j on day t (column 1 of its table), memoized in SOd."""
    key = (t, j)
    if key not in SOd:
        SOd[key] = rd.stock_dict[j][t][1]
    return SOd[key]
def SH(t, j):
    """Intraday high of stock j on day t (column 2 of its table), memoized in SHd."""
    key = (t, j)
    if key not in SHd:
        SHd[key] = rd.stock_dict[j][t][2]
    return SHd[key]
def SL(t, j):
    """Intraday low of stock j on day t (column 3 of its table), memoized in SLd."""
    key = (t, j)
    if key not in SLd:
        SLd[key] = rd.stock_dict[j][t][3]
    return SLd[key]
def SC(t, j):
    """Closing price of stock j on day t (column 4 of its table), memoized in SCd."""
    key = (t, j)
    if key not in SCd:
        SCd[key] = rd.stock_dict[j][t][4]
    return SCd[key]
def TVL(t, j):
    """Trading volume of stock j on day t (column 5 of its table), memoized in TVLd."""
    key = (t, j)
    if key not in TVLd:
        TVLd[key] = rd.stock_dict[j][t][5]
    return TVLd[key]
def AvrTVL(t, j):
    '''Return the mean trading volume of stock j over the (up to) 200 days ending at day t.

    Fix: the list comprehension previously reused ``t`` as its loop variable,
    shadowing the parameter (and, under Python 2 scoping, leaking back into
    the function); renamed to ``i``. The computed values are unchanged.
    '''
    if (t, j) in AvrTVLd: return AvrTVLd[(t, j)]
    res = np.mean([TVL(i, j) for i in range(max(1, t - 200), t + 1)])
    AvrTVLd[(t, j)] = res
    return res
def IND(t, j):
    """Trade direction indicator of stock j on day t (column 6 of its table), memoized in INDd."""
    key = (t, j)
    if key not in INDd:
        INDd[key] = rd.stock_dict[j][t][6]
    return INDd[key]
if __name__ == '__main__':
    # No executable entry point: this module is imported by p1.py / p2.py.
    # The commented block below is retained exploratory analysis.
    pass
##parameters = [10,2,3,4,5,6,7,8,1,2,3,4]
#params = [-0.11812008, -3.05744312, 3.798748, -2.05990281, 0.39163579, 3.8577401,\
#-4.32966829, -3.93373867, -0.46817134, -2.17563513, 2.48775916, 2.48504554]
#st = time.time(); s = sharpe3(params); end = time.time(); print(end - st)
##mnROO = [np.mean([ROO(t,j) for j in rd.stock_dict]) for t in range(rd.T)]
##mnRCC = [np.mean([RCC(t,j) for j in rd.stock_dict]) for t in range(rd.T)]
##mnROC = [np.mean([ROC(t,j) for j in rd.stock_dict]) for t in range(rd.T)]
##mnRCO = [np.mean([RCO(t,j) for j in rd.stock_dict]) for t in range(rd.T)]
#sys.exit()
#rp4s = [RP4(t) for t in range(11, rd.T)]
#i10 = {}
#cd = {}
#j = 's6'
#for j in rd.stock_dict:
##r10 = [ret10(t, j) for t in range(11, rd.T)]
#ind = np.reshape(rd.stock_dict[j][:,6], (1003, 1))
##ind10 = ind[11:,:]
##pos10i = filter(lambda i: r10[i] > 0, range(rd.T - 11))
##pos10 = [int(ind10[i]) for i in pos10i]
###print(Counter(pos10))
##neg10i = filter(lambda i: r10[i] < 0, range(rd.T - 11))
##neg10 = [int(ind10[i]) for i in neg10i]
###print(Counter(neg10))
##i10[j] = (Counter(pos10), Counter(neg10))
#roc = np.reshape(np.array([ROC(t, j) for t in range(rd.T)]), (1003, 1))
##l = lm.LinearRegression()
##l.fit(roc, ind)
#x = ss.linregress(np.transpose(roc)[0], np.transpose(ind)[0])
#cd[j] = x
#coefs = [cd[j].slope for j in cd]
#errs = [cd[j].stderr for j in cd]
#np.median(coefs)
#np.median(errs)
#pv = [cd[j].pvalue for j in cd]
#np.median(pv)
#pvi = [(i, pv[i]) for i in range(100)]
#spvi = sorted(pvi, key=lambda x:x[1])
#sigcoefs = [[x[0], x[1], coefs[x[0]]] for x in spvi[:23]]
<file_sep>/output.py
import csv
import os
import deliverable_results as dr
import matplotlib.pyplot as plt
def print_deliverables(ts_ret, ts_cum_ret, avg_fn):
    '''Calculate and print the deliverable summary statistics.

    ts_ret / ts_cum_ret are daily and cumulative return series; avg_fn is
    the benchmark equal-weight return function passed to the correlation.

    Fix: dr.max_drawdown() was invoked twice on the same series; it is now
    computed once and its (duration, loss) pair unpacked.
    '''
    print('Average Daily Log Returns: ' + str(dr.avg_daily_log_ret(ts_ret)))
    print('Std Dev of Daily Log Returns: ' + str(dr.std_daily_log_ret(ts_ret)))
    print('Annualized Sharpe Ratio: ' + str(dr.annualized_sr(ts_ret)))
    print('Skewness: ' + str(dr.skewness(ts_ret)))
    print('Excess Kurtosis: ' + str(dr.excess_kurtosis(ts_ret)))
    mdd_duration, mdd_loss = dr.max_drawdown(ts_cum_ret)
    print('Max Drawdown Duration: ' + str(mdd_duration))
    print('Max Drawdown Loss: ' + str(mdd_loss))
    print('Equal Weight Correlation: ' + str(dr.equal_weight_corr(ts_ret, avg_fn)))
def plot_portfolio_dynamics(r_low, r_high, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir):
    '''Plot data for deliverables.

    Draws four vertically stacked, x-sharing subplots over days
    [r_low, r_high): daily long-short return, cumulative return, mean
    absolute weight, and portfolio direction. Blocks until the plot
    window is closed (plt.show()).
    '''
    f, axarr = plt.subplots(4, sharex=True)
    axarr[0].plot(range(r_low, r_high), ts_ret[r_low:])
    axarr[0].set_title('Long-short return')
    axarr[1].plot(range(r_low, r_high), ts_cum_ret[r_low:])
    axarr[1].set_title('Cumulative long-short return')
    axarr[2].plot(range(r_low, r_high), ts_mean_abs_w[r_low:])
    axarr[2].set_title('Mean absolute weight')
    axarr[3].plot(range(r_low, r_high), ts_port_dir[r_low:])
    axarr[3].set_title('Portfolio direction')
    plt.xlabel('Day t')
    # Days 0-1 are warm-up, so the x-axis starts at 2.
    plt.xlim(2, 1002)
    plt.show()
def generate_data_matrix(T, N, stock_dict, date_dict, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir, W_fn, coeffs=None):
    '''Generate a matrix of data to be exported as CSV.

    Row t holds [date, return, cumulative return, mean |weight|,
    portfolio direction] followed by the N per-stock weights from W_fn
    (called with coeffs when provided); warm-up days t < 2 use the
    placeholder value 99 for every stock column.

    Fix: removed a no-op ``map(str, row)`` whose result was discarded --
    csv.writer stringifies values itself.
    '''
    data_matrix = []
    for t in range(0, T):
        row = [date_dict[t], ts_ret[t], ts_cum_ret[t], ts_mean_abs_w[t], ts_port_dir[t]]
        if t >= 2:
            if coeffs:
                row += [W_fn(t, 's' + str(i), coeffs) for i in range(0, N)]
            else:
                row += [W_fn(t, 's' + str(i)) for i in range(0, N)]
        else:
            row += [99 for j in stock_dict]
        data_matrix.append(row)
    return data_matrix
def generate_csv_data(filename, data_matrix):
    '''Output a matrix of data as a csv under deliverables/<filename>.

    NOTE(review): binary mode 'wb+' matches the Python 2 csv module; for
    Python 3 this would need open(..., 'w', newline='') -- confirm the
    target interpreter before changing.
    '''
    with open(os.path.join('deliverables', filename), 'wb+') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        # Summary columns first, then one column per stock (fixed at 100).
        header = ['yyyymmdd', 'return', 'cumulative_return', 'mean_abs_weight', 'portfolio_direction']
        stocks = ['Stock_' + str(i) for i in range(0, 100)]
        writer.writerow(header + stocks)
        writer.writerows(data_matrix)
def generate_csv_coeff(filename, coeff_names, coeff_vals):
'''Output a series of coefficients as a csv'''
with open(os.path.join('deliverables', filename), 'wb+') as csvfile:
writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writer.writerow(coeff_names)
writer.writerow(coeff_vals) <file_sep>/deliverable_results.py
import numpy as np
import scipy as sp
def avg_daily_log_ret(ts_ret):
    '''Average daily log return, excluding the warm-up days 0 and 1.'''
    return np.mean([np.log(1 + r) for r in ts_ret[2:]])
def std_daily_log_ret(ts_ret):
    '''Sample standard deviation (ddof=1) of daily log returns, excluding warm-up days 0-1.'''
    return np.std([np.log(1 + r) for r in ts_ret[2:]], ddof=1)
def annualized_sr(ts_ret):
    '''Annualized Sharpe ratio of daily returns (252 trading days/year), excluding warm-up days 0-1.'''
    daily = ts_ret[2:]
    return (float(np.mean(daily)) / np.std(daily, ddof=1)) * np.sqrt(252)
def skewness(ts_ret):
    '''Skewness of the strategy's daily returns, excluding warm-up days 0-1.'''
    daily = ts_ret[2:]
    return sp.stats.skew(daily)
def excess_kurtosis(ts_ret):
    '''Excess (Fisher) kurtosis of the strategy's daily returns, excluding warm-up days 0-1.'''
    daily = ts_ret[2:]
    return sp.stats.kurtosis(daily)
def max_drawdown(ts_cum_ret):
    '''Return (duration, loss) of the maximum drawdown.

    Scans the cumulative-return series, offset by +1 so values behave
    like portfolio levels. Tracks the running peak and the lowest valley
    seen after it, and records the peak/valley pair with the largest
    absolute drop. Warm-up days 0-1 are skipped. Duration is measured in
    days from peak to valley; loss is returned as a negative magnitude.
    The commented-out lines are an abandoned percentage-based variant.
    '''
    curr_peak = ts_cum_ret[2] + 1
    curr_peak_i = 2
    curr_valley = ts_cum_ret[2] + 1
    curr_valley_i = 2
    mdd_peak = curr_peak
    mdd_peak_i = 2
    mdd_valley = curr_valley
    mdd_valley_i = 2
    mdd_mag = 0
    #mdd_percent = 0
    for i in range(2, len(ts_cum_ret)):
        curr = ts_cum_ret[i] + 1
        if curr < curr_valley:
            # New low since the current peak: candidate drawdown.
            curr_valley = curr
            curr_valley_i = i
            if (curr_peak - curr_valley) > mdd_mag:
            #if (curr_peak - curr_valley) / curr_peak > mdd_percent:
                # Largest drop so far: record this peak/valley pair.
                mdd_peak = curr_peak
                mdd_peak_i = curr_peak_i
                mdd_valley = curr_valley
                mdd_valley_i = curr_valley_i
                #mdd_percent = (mdd_peak - mdd_valley) / mdd_peak
                mdd_mag = mdd_peak - mdd_valley
        elif curr > curr_peak:
            # New high: reset both peak and valley trackers here.
            curr_peak = curr
            curr_peak_i = i
            curr_valley = curr
            curr_valley_i = i
    duration = mdd_valley_i - mdd_peak_i
    #return (duration, -mdd_percent)
    return(duration, -mdd_mag)
def equal_weight_corr(ts_ret, avg_fn):
    '''Return correlation between strategy and equal weight long portfolio.

    NOTE(review): np.correlate on two same-length 1-D arrays ('valid'
    mode) returns the sum of elementwise products -- an unnormalized dot
    product, not a Pearson correlation coefficient. If a true correlation
    is intended, use np.corrcoef(ts_ret[2:], eq_weight_ret)[0, 1].
    '''
    eq_weight_ret = [avg_fn(t) for t in range(2, len(ts_ret))]
    return np.correlate(ts_ret[2:], eq_weight_ret)[0]<file_sep>/p2.py
import output as op
import observations as ob
from metrics import *
if __name__ == '__main__':
    # Part 2 driver: evaluate the fitted 12-parameter open-to-close
    # strategy, print/plot the deliverables, and export CSV outputs.
    #define params (fitted coefficients a1..a12 for the W2 signal terms)
    param_names = ['a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'a10', 'a11', 'a12']
    param_vals = [-0.11812008, -3.05744312, 3.798748, -2.05990281, 0.39163579, 3.8577401,\
    -4.32966829, -3.93373867, -0.46817134, -2.17563513, 2.48775916, 2.48504554]
    #generate time series of daily returns, cumulative returns, weights, direction
    ts_ret = ob.ts_return(RP2, coeffs=param_vals)
    ts_cum_ret = ob.ts_cum_return(RP2, coeffs=param_vals)
    ts_mean_abs_w = ob.ts_mean_abs_weight(W2_wrapper, coeffs=param_vals)
    ts_port_dir = ob.ts_portfolio_dir(W2_wrapper, coeffs=param_vals)
    #generate deliverable results (summary statistics to stdout)
    op.print_deliverables(ts_ret, ts_cum_ret, AvrROC)
    #plot data (blocks until the window is closed)
    op.plot_portfolio_dynamics(2, rd.T, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir)
    #generate data_matrix for output
    data_matrix = op.generate_data_matrix(rd.T, rd.N, rd.stock_dict, \
    rd.date_dict, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir, W2_wrapper, coeffs=param_vals)
    #output data to CSV
    op.generate_csv_data('data_part2.team_B.csv', data_matrix)
    #output coefficients to CSV
    op.generate_csv_coeff('coeff_part2.team_B.csv', param_names, param_vals)
##performs at -0.064784872847161015 with IND
##three param version performs at -0.033414734359919554 with IND
##r = scipy.optimize.minimize(f, np.array(params),\
## options={'maxiter':1000, 'disp':True})
<file_sep>/p1.py
import output as op
import observations as ob
from metrics import *
if __name__ == '__main__':
    # Part 1 driver: evaluate the parameter-free contrarian strategy
    # (W1 / RP1), print/plot the deliverables, and export the CSV output.
    #generate time series of daily returns, cumulative returns, weights, direction
    ts_ret = ob.ts_return(RP1)
    ts_cum_ret = ob.ts_cum_return(RP1)
    ts_mean_abs_w = ob.ts_mean_abs_weight(W1)
    ts_port_dir = ob.ts_portfolio_dir(W1)
    #generate deliverable results (summary statistics to stdout)
    op.print_deliverables(ts_ret, ts_cum_ret, AvrROC)
    #plot data (blocks until the window is closed)
    op.plot_portfolio_dynamics(2, rd.T, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir)
    #generate data_matrix for output
    data_matrix = op.generate_data_matrix(rd.T, rd.N, rd.stock_dict, \
    rd.date_dict, ts_ret, ts_cum_ret, ts_mean_abs_w, ts_port_dir, W1)
    #output data to CSV
    op.generate_csv_data('data_part1.team_B.csv', data_matrix)
|
d1f8a0bc3a53da52376ced6d644b3483d0db6a59
|
[
"Python"
] | 10 |
Python
|
47pik/quantathon
|
c5ae7ac4a57ca6f9afa8ed27e629da5264731b6f
|
cf4a39db29113ef03d923506fe41e05cef392db2
|
refs/heads/master
|
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat May 9 16:14:34 2020
@author: crull
"""
import os
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello_name():
    # Root route: serve the static landing page template.
    return render_template("main.html")
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)<file_sep># wasteyourtime
An everloading website.
## Introduction
Wasteyourtime is a minimal web app built with Python, using Flask.
|
4a011e3f88a7aff71b1b0a093bbee4c7c5052523
|
[
"Markdown",
"Python"
] | 2 |
Python
|
clara-9/wasteyourtime
|
83c3ddd786197291697ce95b142261b5db4ba245
|
f2cefb3c987c0851b0c7661ad821a9ff9d86fcb8
|
refs/heads/main
|
<repo_name>sodatodo/single-spa-radar<file_sep>/README.md
# single-spa-radar
A wrapper that lets single-spa decide whether to load applications from release packages or from a URL.
<file_sep>/src/core.ts
import { Application, ExtraProps } from 'single-spa/typings/single-spa'
import { promiseForEach } from './promiseLoop'
/**
 * Resolve a single-spa application from a list of candidate loaders.
 *
 * When `isRelease` is true, only `releaseLoadingFn` is used. Otherwise the
 * release loader (if provided) is appended as the last candidate and the
 * first loader that succeeds wins.
 *
 * Fixes: the release branch previously wrapped the loader in an explicit
 * Promise that never settled when the loader rejected; it now returns the
 * loader's own chain so rejections propagate. The unused `reject` callback
 * was also removed.
 */
const singleSpaRadar = (applicationOrLoadingFns: Application<ExtraProps>[], releaseLoadingFn: Application<ExtraProps>, isRelease: boolean = false) => {
  if (isRelease) {
    return Promise.resolve(releaseLoadingFn());
  } else {
    if (releaseLoadingFn) {
      // Try the release bundle last, after every candidate loader.
      applicationOrLoadingFns.push(releaseLoadingFn);
    }
    return new Promise((resolve) => {
      // Resolve with the first loader that succeeds.
      promiseForEach(applicationOrLoadingFns, (application) => {
        resolve(application);
      });
    });
  }
}
export {
singleSpaRadar,
}<file_sep>/rollup.config.js
import resolve from "@rollup/plugin-node-resolve";
import commonjs from "@rollup/plugin-commonjs";
import babel from "@rollup/plugin-babel";
import nodePolyfills from 'rollup-plugin-node-polyfills';
// import { terser } from "rollup-plugin-terser";
// import { eslint } from 'rollup-plugin-eslint';
// import serve from 'rollup-plugin-serve';
// import livereload from 'rollup-plugin-livereload';
// import pkg from './package.json';
// Resolve both plain JS and TypeScript sources.
const extensions = ['.js', '.ts'];
// Single ESM build target: src/index.ts -> dist/bundle.js.
const esmConfig = {
  input: 'src/index.ts',
  output: {
    file: 'dist/bundle.js',
    format: 'esm',
  },
  plugins: [
    nodePolyfills(), // shim Node built-ins for browser consumption
    resolve({
      extensions,
    }),
    babel({
      exclude: 'node_modules/**',
      extensions,
      babelHelpers: 'runtime' // share helpers via @babel/runtime
    }),
    commonjs(), // convert CommonJS dependencies to ESM
  ]
}
export default esmConfig;
<file_sep>/src/promiseLoop.ts
/**
 * Invoke each promise-returning function in `promiseArr` in order, calling
 * `cb` with the first successful result; later entries are only tried when
 * an earlier one rejects.
 *
 * Fix: the recursive `.catch` previously threw without returning the inner
 * chain, so both the "all failed" error and any later failures became
 * guaranteed unhandled rejections. The chain is now returned at every
 * level, so the promise returned by promiseForEach rejects with
 * 'no module can resolve' when every loader fails (callers may catch it;
 * ignoring the return value behaves as before for the success path).
 */
const promiseForEach = (promiseArr, cb) => {
  let index = 0;
  const nextPromise = () => {
    const currentPromise = promiseArr[index];
    return currentPromise().then((result) => {
      cb(result);
    }).catch(() => {
      index += 1;
      if (index >= promiseArr.length) {
        throw new Error('no module can resolve');
      }
      return nextPromise();
    });
  };
  return nextPromise();
}
export {
promiseForEach
}<file_sep>/src/scaner.ts
/**
 * Diagnostic helper: await a loader function and log either its resolved
 * value or the error it threw. Never rethrows.
 */
async function checkLoadFn(loadFn) {
    try {
        const loaded = await loadFn();
        console.log(`result`, loaded)
    } catch (error) {
        console.log(`error`, error)
    }
}
export {
checkLoadFn,
}<file_sep>/src/index.ts
import { singleSpaRadar } from './core';
export default singleSpaRadar;
|
e7c06d034aa9a108e6cea7755235caed0fdbd440
|
[
"Markdown",
"TypeScript",
"JavaScript"
] | 6 |
Markdown
|
sodatodo/single-spa-radar
|
dd2c341919b66c174e114d7b3e015b20eb911f07
|
3cfcf987c72d5a5387ef312abf6d334036066f87
|
refs/heads/master
|
<repo_name>kushaldelhiwala/CSE-320-HW-2<file_sep>/src/main.c
#include <stdio.h>
#include "warehouse.h"
#include <string.h>
#include <stdlib.h>
#include <getopt.h>
#include <unistd.h>
#include "linked_list.h"
/* Singly-linked list node recording warehouse IDs seen so far; used to
 * reject duplicate IDs while loading a warehouse file. */
struct warehouse_id {
    int id;                      /* warehouse identifier */
    struct warehouse_id* next;   /* next node, or NULL at list end */
};
/* Allocate and initialize a new warehouse_id node holding `id`.
 * Returns the new node with next = NULL, or NULL if allocation fails
 * (fix: the malloc result was previously dereferenced unchecked). */
struct warehouse_id* addWarehouseId(int id){
    struct warehouse_id* newWarehouseId = (struct warehouse_id*)malloc(sizeof(struct warehouse_id));
    if (newWarehouseId == NULL) {
        return NULL; /* out of memory; caller must check */
    }
    newWarehouseId -> id = id;
    newWarehouseId ->next = NULL;
    return newWarehouseId;
}
int main(int argc, char** argv){
char user_command [255];
char operation [255];
char operation2 [255];
char warehouse_filename[255];
char print_operation[255];
char name [255];
int size;
int price;
int exit_flag = 0;
int ret = -1;
int q_flag = 0;
int w_flag = 0;
int a_flag = 0;
int s_flag = 0;
char* w_filename = NULL;
char* a_filename = NULL;
char* s_indicator = NULL;
int c;
FILE* warehouse_file;
int load_warehouse_flag = 0;
FILE* art_file;
struct warehouse_sf_list* pointer = sf_head;
int warehouse_occupied = 0;
int total_num_warehouses = 0;
int art_collections_size = 0;
int total_warehouse_capacity = 0;
struct warehouse_id* head_id = NULL;
while((c = getopt(argc, argv, "qw:a:s:")) != -1){
switch(c){
case 'q':
q_flag = 1;
break;
case 'w':
w_flag = 1;
w_filename = optarg;
break;
case 'a':
a_flag = 1;
a_filename = optarg;
break;
case 's':
s_flag = 1;
s_indicator = optarg;
break;
default:
printf("OPTARG ERROR\n");
break;
}
}
if ((q_flag == 1 && w_flag == 0 && a_flag == 0) || (q_flag == 1 && w_flag == 1 && a_flag == 0) || (q_flag == 1 && w_flag == 0 && a_flag == 1)){
printf("FLAG ERROR\n");
exit(0);
}
if (s_indicator != NULL){
if ((strcmp(s_indicator, "s") != 0) && (strcmp(s_indicator, "p") != 0)){
printf("FLAG ERROR\n");
exit(0);
}
}
if (q_flag == 1){
if (access(w_filename, F_OK) != -1) {
warehouse_file = fopen(w_filename, "rt");
int id;
int size;
char type[30];
struct warehouse *temp_warehouse;
struct warehouse_list *temp_warehouse_list;
struct warehouse_sf_list *pointer2;
struct warehouse_sf_list *temp_warehouse_sf;
struct warehouse_id* temp_warehouse_id;
struct warehouse_id* mover_id;
char singleLine[255];
while (!feof(warehouse_file)) {
fgets(singleLine, 255, warehouse_file);
sscanf(singleLine, "%d %d %s", &id, &size, type);
pointer2 = pointer;
mover_id = head_id;
if (size < 4) {
printf("Size cannot be less than 4\n");
exit(0);
}
if (size % 2 != 0) {
printf("Size must be divisible by 2\n");
exit(0);
}
while(mover_id!=NULL){
if (mover_id->id == id){
printf("You cannot have two same IDs\n");
exit (0);
}
mover_id = mover_id->next;
}
mover_id = head_id;
temp_warehouse_id = addWarehouseId(id);
if (mover_id == NULL){
mover_id = temp_warehouse_id;
}
if(mover_id!=NULL){
while(mover_id->next!=NULL){
mover_id= mover_id->next;
}
mover_id->next = temp_warehouse_id;
}
temp_warehouse = createWarehouse(id, size);
temp_warehouse_list = createWarehouseList(temp_warehouse, type, size);
temp_warehouse_list->next_warehouse = NULL;
if (pointer == NULL) {
temp_warehouse_sf = createWarehouseSfList(size, temp_warehouse_list);
pointer = temp_warehouse_sf;
pointer2 = temp_warehouse_sf;
//temp_warehouse_sf -> warehouse_list_head = temp_warehouse_list;
} else if (pointer2 != NULL) {
while (pointer2->sf_next_warehouse != NULL) {
if (pointer2->class_size == size) {
struct warehouse_list *warehouse_list = pointer2->warehouse_list_head;
while (warehouse_list->next_warehouse != NULL) {
warehouse_list = warehouse_list->next_warehouse;
}
warehouse_list->next_warehouse = temp_warehouse_list;
break;
} else {
pointer2 = pointer2->sf_next_warehouse;
}
}
if (pointer2->sf_next_warehouse == NULL) {
if (pointer2->class_size == size) {
struct warehouse_list *warehouse_list = pointer2->warehouse_list_head;
while (warehouse_list->next_warehouse != NULL) {
warehouse_list = warehouse_list->next_warehouse;
}
warehouse_list->next_warehouse = temp_warehouse_list;
} else {
temp_warehouse_sf = createWarehouseSfList(size, temp_warehouse_list);
pointer2->sf_next_warehouse = temp_warehouse_sf;
}
}
}
}
} else {
printf("File cannot be opened\n");
exit(0);
}
if (access(a_filename, F_OK) != -1) {
art_file = fopen(a_filename, "rt");
char name[50];
int size;
int price;
char line[255];
int rep;
struct art_collection *temp_art_collection;
struct warehouse_sf_list *pointer3;
struct warehouse_list *temp_ware_list;
struct warehouse *temp_ware;
uint64_t occupied;
int art_coll_occupied;
while (!feof(art_file)) {
fgets(line, 255, art_file);
rep = sscanf(line, "%s %d %d", name, &size, &price);
if (rep != 3) {
sscanf(line, "\"%[^\"]\" %d %d", name, &size, &price);
}
art_coll_occupied = 0;
temp_art_collection = createArtCollection(name, size, price);
pointer3 = pointer;
if (pointer3 == NULL) {
printf("Currently have no warehouses(lists\n");
} else {
while (pointer3 != NULL) {
if (pointer3->class_size == size) {
temp_ware_list = pointer3->warehouse_list_head;
while (temp_ware_list != NULL) {
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
//temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_coll_occupied = 1;
break;
} else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
} else {
pointer3 = pointer3->sf_next_warehouse;
}
}
if (art_coll_occupied == 0) {
pointer3 = pointer;
while (pointer3 != NULL) {
if (pointer3->class_size > size) {
temp_ware_list = pointer3->warehouse_list_head;
while (temp_ware_list != NULL) {
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_coll_occupied = 1;
break;
} else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
} else {
pointer3 = pointer3->sf_next_warehouse;
}
}
}
if (art_coll_occupied == 0) {
printf("Sorry, no warehouse found to place art collection\n");
exit(0);
}
}
}
} else {
printf("File cannot be opened\n");
exit(0);
}
while(pointer!= NULL){
struct warehouse_list* cursor = pointer->warehouse_list_head;
while(cursor!=NULL){
if (cursor->warehouse->art_collection == NULL){}
else{
printf("%s %d %d\n", cursor->warehouse->art_collection->name, cursor->warehouse->art_collection->size, cursor->warehouse->art_collection->price);
}
cursor = cursor->next_warehouse;
}
pointer = pointer->sf_next_warehouse;
}
exit(0);
}
do{
printf("Insert Command: ");
fgets(user_command, 255, stdin);
char k = user_command[0];
if (k == 'l'){
ret = sscanf(user_command, "%*s %s %s", operation, warehouse_filename);
if (strcmp(operation, "warehouse") == 0){
//do stuff, check filename
if (access(warehouse_filename, F_OK)!= -1){
load_warehouse_flag = 1;
warehouse_file = fopen(warehouse_filename, "rt");
int id;
int size;
char type[30];
struct warehouse* temp_warehouse;
struct warehouse_list* temp_warehouse_list;
struct warehouse_sf_list* pointer2;
struct warehouse_sf_list* temp_warehouse_sf;
struct warehouse_id* temp_warehouse_id;
struct warehouse_id* mover_id;
char singleLine [255];
while (!feof(warehouse_file)){
fgets (singleLine, 255, warehouse_file);
sscanf(singleLine, "%d %d %s", &id, &size, type);
pointer2 = pointer;
mover_id = head_id;
if (size < 4) {
printf("Size cannot be less than 4\n");
exit (0);
}
if (size % 2 != 0){
printf("Size must be divisible by 2\n");
exit (0);
}
while(mover_id!=NULL){
if (mover_id->id == id){
printf("You cannot have two same IDs\n");
exit (0);
}
mover_id = mover_id->next;
}
mover_id = head_id;
temp_warehouse_id = addWarehouseId(id);
if (head_id == NULL){
head_id = temp_warehouse_id;
}
else if(mover_id!=NULL){
while(mover_id->next!=NULL){
mover_id= mover_id->next;
}
mover_id->next = temp_warehouse_id;
}
temp_warehouse = createWarehouse(id, size);
total_num_warehouses++;
total_warehouse_capacity+=size;
temp_warehouse_list = createWarehouseList(temp_warehouse, type, size);
temp_warehouse_list->next_warehouse = NULL;
if (pointer == NULL){
temp_warehouse_sf = createWarehouseSfList(size, temp_warehouse_list);
pointer = temp_warehouse_sf;
pointer2 = temp_warehouse_sf;
//temp_warehouse_sf -> warehouse_list_head = temp_warehouse_list;
}
else if (pointer2 != NULL){
while (pointer2 -> sf_next_warehouse != NULL){
if (pointer2->class_size == size){
struct warehouse_list* warehouse_list = pointer2->warehouse_list_head;
while (warehouse_list->next_warehouse != NULL){
warehouse_list = warehouse_list -> next_warehouse;
}
warehouse_list->next_warehouse = temp_warehouse_list;
break;
}
else{
pointer2 = pointer2 ->sf_next_warehouse;
}
}
if(pointer2 -> sf_next_warehouse == NULL){
if (pointer2 -> class_size == size){
struct warehouse_list* warehouse_list = pointer2->warehouse_list_head;
while (warehouse_list->next_warehouse != NULL){
warehouse_list = warehouse_list -> next_warehouse;
}
warehouse_list->next_warehouse = temp_warehouse_list;
}
else{
temp_warehouse_sf = createWarehouseSfList(size, temp_warehouse_list);
pointer2 -> sf_next_warehouse = temp_warehouse_sf;
}
}
}
}
}
else{
printf("File cannot be opened\n");
}
}
else if (strcmp(operation, "art") == 0){
//do stuff, check filename
if (access(warehouse_filename, F_OK)!= -1){
art_file = fopen(warehouse_filename, "rt");
//char name[50];
//int size;
//int price;
char line[255];
int rep;
struct art_collection* temp_art_collection;
struct warehouse_sf_list* pointer3;
struct warehouse_list* temp_ware_list;
struct warehouse* temp_ware;
uint64_t occupied;
int art_coll_occupied;
while (!feof(art_file)){
fgets (line, 255, art_file);
rep = sscanf(line, "%s %d %d", name, &size, &price);
if (rep != 3){
sscanf(line, "\"%[^\"]\" %d %d", name, &size, &price);
}
art_coll_occupied = 0;
temp_art_collection = createArtCollection (name, size, price);
art_collections_size+=size;
pointer3 = pointer;
if (pointer3 == NULL){
printf("Currently have no warehouses(lists\n");
break;
}
else {
while (pointer3 != NULL) {
if (pointer3->class_size == size) {
temp_ware_list = pointer3->warehouse_list_head;
while(temp_ware_list != NULL){
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
warehouse_occupied++;
//temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_coll_occupied = 1;
break;
}
else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
}
else {
pointer3 = pointer3->sf_next_warehouse;
}
}
if (art_coll_occupied == 0){
pointer3 = pointer;
while (pointer3 != NULL) {
if (pointer3->class_size > size) {
temp_ware_list = pointer3->warehouse_list_head;
while(temp_ware_list != NULL){
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
warehouse_occupied++;
temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_coll_occupied = 1;
break;
}
else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
}
else {
pointer3 = pointer3->sf_next_warehouse;
}
}
}
if(art_coll_occupied == 0){
printf("Sorry, no warehouse found to place art collection\n");
exit (0);
}
}
}
}
else{
printf("File cannot be opened\n");
}
}
else{
printf("Such a command is not available. The list of available commands are in help\n");
}
}
else if (k == 'p'){
ret = sscanf(user_command, "%s", print_operation);
if(strcmp(print_operation, "print") != 0 && strcmp(print_operation, "printall")!= 0){
printf("Such a command is not available. This list of available are in help\n");
}
if (strcmp(print_operation, "print") == 0){
ret = sscanf(user_command, "%*s %s", operation);
}
if (strcmp(print_operation, "printall") == 0){
struct warehouse_sf_list* pointer_print = pointer;
while(pointer_print!= NULL){
struct warehouse_list* cursor = pointer_print->warehouse_list_head;
while(cursor!=NULL){
if (cursor->warehouse->art_collection == NULL){}
else {
printf("%s %d %d\n", cursor->warehouse->art_collection->name, cursor->warehouse->art_collection->size, cursor->warehouse->art_collection->price);
}
cursor = cursor->next_warehouse;
}
pointer_print = pointer_print->sf_next_warehouse;
}
}
else if (strcmp(operation, "public") == 0){
struct warehouse_sf_list* pointer_print = pointer;
while(pointer_print!= NULL){
struct warehouse_list* cursor = pointer_print->warehouse_list_head;
while(cursor!=NULL){
if (cursor->warehouse->art_collection == NULL){}
else if(((cursor->meta_info) & 1) == 0) {
printf("%s %d %d\n", cursor->warehouse->art_collection->name, cursor->warehouse->art_collection->size, cursor->warehouse->art_collection->price);
}
cursor = cursor->next_warehouse;
}
pointer_print = pointer_print->sf_next_warehouse;
}
}
else if (strcmp(operation, "private") == 0){
struct warehouse_sf_list* pointer_print = pointer;
while(pointer_print!= NULL){
struct warehouse_list* cursor = pointer_print->warehouse_list_head;
while(cursor!=NULL){
if (cursor->warehouse->art_collection == NULL){}
else if(((cursor->meta_info) & 1) == 1) {
printf("%s %d %d\n", cursor->warehouse->art_collection->name, cursor->warehouse->art_collection->size, cursor->warehouse->art_collection->price);
}
cursor = cursor->next_warehouse;
}
pointer_print = pointer_print->sf_next_warehouse;
}
}
else{
printf("Such a command is not available. This list of available are in help\n");
}
}
else if (k == 'a'){
ret = sscanf(user_command, "%s %s %s %d %d",operation, operation2, name, &size, &price);
if (ret != 5){
ret = sscanf(user_command, "%s %s \"%[^\"]\" %d %d",operation, operation2, name, &size, &price);
}
if (strcmp(operation, "add") != 0){
printf("Such a command is not available. The list of available commands are in help\n");
}
else if (strcmp(operation2, "art") != 0){
printf("Such a command is not available. The list of available commands are in help\n");
}
else {
char name[50];
int size;
int price;
char line[255];
int rep;
struct art_collection *temp_art_collection;
struct warehouse_list *temp_ware_list;
struct warehouse *temp_ware;
struct warehouse_sf_list* pointer3;
uint64_t occupied;
int art_collection_occ = 0;
art_collection_occ = 0;
temp_art_collection = createArtCollection(name, size, price);
art_collections_size+=size;
pointer3 = pointer;
if (pointer3 == NULL) {
printf("Currently have no warehouses(lists\n");
break;
}
else {
while (pointer3 != NULL) {
if (pointer3->class_size == size) {
temp_ware_list = pointer3->warehouse_list_head;
while(temp_ware_list != NULL){
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
warehouse_occupied++;
//temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_collection_occ = 1;
break;
}
else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
}
else {
pointer3 = pointer3->sf_next_warehouse;
}
}
if (art_collection_occ == 0){
pointer3 = pointer;
while (pointer3 != NULL) {
if (pointer3->class_size > size) {
temp_ware_list = pointer3->warehouse_list_head;
while(temp_ware_list != NULL){
occupied = ((temp_ware_list->meta_info) & 2);
if (occupied == 0) {
temp_ware = temp_ware_list->warehouse;
addToWarehouse(temp_ware, temp_art_collection);
warehouse_occupied++;
temp_ware_list->warehouse = temp_ware;
(temp_ware_list->meta_info) |= 2;
art_collection_occ = 1;
break;
}
else {
temp_ware_list = temp_ware_list->next_warehouse;
}
}
break;
}
else {
pointer3 = pointer3->sf_next_warehouse;
}
}
}
if (art_collection_occ == 0){
printf("No warehouse available\n");
exit (0);
}
}
}
}
else if(k == 'd'){
ret = sscanf(user_command, "%s %s \"%[^\"]\"", operation, operation2, name);
if (ret != 3){
ret = sscanf(user_command, "%s %s %s", operation, operation2, name);
}
if (strcmp(operation, "delete") != 0){
printf("Such a command is not available. The list of available commands are in help\n");
}
else if (strcmp(operation2, "art") != 0){
printf("Such a command is not available. The list of available commands are in help\n");
}
else {
struct warehouse_sf_list* pointer3;
pointer3 = pointer;
if(pointer3== NULL){
printf("Have no warehouses so cannot delete\n");
}
while(pointer3->sf_next_warehouse!=NULL){
struct warehouse_list* temp_warehouse_list = pointer3->warehouse_list_head;
while(temp_warehouse_list!=NULL){
if (temp_warehouse_list->warehouse->art_collection== NULL){}
else if (strcmp((temp_warehouse_list->warehouse->art_collection->name),(name))==0){
art_collections_size-=temp_warehouse_list->warehouse->art_collection->size;
warehouse_occupied--;
free(temp_warehouse_list->warehouse->art_collection->name);
free(temp_warehouse_list->warehouse->art_collection);
temp_warehouse_list->warehouse->art_collection = NULL;
temp_warehouse_list->meta_info &= ~(1 << 1);
}
temp_warehouse_list = temp_warehouse_list->next_warehouse;
}
pointer3 = pointer3->sf_next_warehouse;
}
if(pointer3->sf_next_warehouse == NULL){
struct warehouse_list* temp_warehouse_list = pointer3->warehouse_list_head;
while(temp_warehouse_list!=NULL){
if (temp_warehouse_list->warehouse->art_collection== NULL){}
else if (strcmp((temp_warehouse_list->warehouse->art_collection->name),(name))==0){
art_collections_size-=temp_warehouse_list->warehouse->art_collection->size;
warehouse_occupied--;
free(temp_warehouse_list->warehouse->art_collection->name);
free(temp_warehouse_list->warehouse->art_collection);
temp_warehouse_list->warehouse->art_collection = NULL;
temp_warehouse_list->meta_info &= ~(1 << 1);
}
temp_warehouse_list = temp_warehouse_list->next_warehouse;
}
}
}
}
else if(k == 'u' || k == 'h' || k == 'e'){
sscanf(user_command, "%s", operation);
if (strcmp(operation, "help") == 0){
printf("Here are the functions of the various commands\n");
printf("help: This command gives a description of all the other commands\n");
printf("load warehouse: This commands populates your linked list of warehouses\n");
printf("load art: This commands populates your warehouse with art collections provided in the file\n");
printf("printall: This commands prints information about all the art collections in database\n");
printf("print public: This command prints information about art collections that are in public warehouses\n");
printf("print private: This command prints information about art collections that are in private warehouses\n");
printf("add art: This commands adds a new art collection to database with given attributes\n");
printf("delete: This command deletes art collections with matching name\n");
printf("utilization: This command prints number of occupied warehouses vs total, and total size of art collections vs total warehouse capacity\n");
printf("exit: will exit program\n");
}
else if(strcmp(operation, "exit") == 0){
exit_flag = 1;
}
else if(strcmp(operation, "utilization") == 0){
double ratio1 = (double)warehouse_occupied/(double)total_num_warehouses;
double ratio2 = (double)art_collections_size/(double)total_warehouse_capacity;
printf("Occupied/Total Ratio: %f\n", ratio1);
printf("Size/Capacity Ratio: %f\n", ratio2);
}
else{
printf("Such a command is not available. The list of available commands are in help\n");
}
}
else{
printf("Such a command is not available. The list of available commands are in help\n");
}
} while(exit_flag == 0);
return 0;
}
<file_sep>/src/linked_list.c
#include <stdlib.h>
#include <stdio.h>
#include "warehouse.h"
#include <string.h>
#include "linked_list.h"
/*
 * Allocate a new art collection record holding a private copy of `name`.
 *
 * name:  display name of the collection (copied; caller keeps ownership of its buffer)
 * size:  storage size units the collection occupies
 * price: price of the collection
 * Returns a heap-allocated struct the caller must eventually free
 * (both ->name and the struct itself).
 */
struct art_collection* createArtCollection(char* name, int size, int price){
    struct art_collection* newArtCollection = (struct art_collection*)malloc(sizeof(struct art_collection));
    /* Size the copy to the actual string: the original malloc(50) overflowed
     * on names of 50+ characters (strcpy writes strlen+1 bytes). */
    newArtCollection -> name = (char*)malloc(strlen(name) + 1);
    strcpy((newArtCollection -> name), name);
    newArtCollection -> size = size;
    newArtCollection -> price = price;
    return newArtCollection;
}
/*
 * Allocate a warehouse with the given id and capacity.
 * The art_collection slot starts empty; it is filled later via addToWarehouse().
 */
struct warehouse* createWarehouse(int id, int size){
    struct warehouse* w = (struct warehouse*)malloc(sizeof(struct warehouse));
    w->id = id;
    w->size = size;
    w->art_collection = NULL;
    return w;
}
/* Place an art collection into a warehouse; returns the same warehouse
 * pointer for caller convenience. Any previous collection pointer is
 * overwritten (not freed) — callers manage that lifetime. */
struct warehouse* addToWarehouse(struct warehouse* temp_warehouse, struct art_collection* temp_art_collection){
    temp_warehouse->art_collection = temp_art_collection;
    return temp_warehouse;
}
/*
 * Build a warehouse_list node wrapping `warehouse`.
 *
 * meta_info bit layout (as used by the callers in this program):
 *   bit 0  — privacy flag: 1 when `type` is "private", 0 otherwise
 *   bit 1  — occupied flag, left clear here and set later by callers
 *   bits 2+ — the warehouse size
 */
struct warehouse_list* createWarehouseList (struct warehouse* warehouse, char* type, int size){
    struct warehouse_list* node = (struct warehouse_list*)malloc(sizeof(struct warehouse_list));
    uint64_t meta = ((uint64_t)size) << 2;
    /* "public" (and any unrecognised type) leaves bit 0 at zero. */
    if (strcmp(type, "private") == 0) {
        meta |= 1;
    }
    node->warehouse = warehouse;
    node->next_warehouse = NULL;
    node->meta_info = meta;
    return node;
}
/* Allocate a size-class head node for the segregated-fit list of warehouses.
 * `warehouse_list_head` becomes the first warehouse of this size class. */
struct warehouse_sf_list* createWarehouseSfList(int class_size, struct warehouse_list* warehouse_list_head){
    struct warehouse_sf_list* head = (struct warehouse_sf_list*)malloc(sizeof(struct warehouse_sf_list));
    head->class_size = class_size;
    head->warehouse_list_head = warehouse_list_head;
    head->sf_next_warehouse = NULL;
    return head;
}
<file_sep>/Makefile
# Build the art database CLI from all C sources under src/.
# .PHONY keeps these targets working even if files named "all"/"clean" exist.
.PHONY: all clean

all:
	gcc -Wall -Wextra src/*.c -o art_db

# -f: do not fail when the binary has not been built yet.
clean:
	rm -f art_db
<file_sep>/src/linked_list.h
#ifndef LINKED_LIST_H
#define LINKED_LIST_H
/* Constructors and helpers for the warehouse / art-collection linked lists.
 * All create* functions heap-allocate their result; the caller owns the memory.
 * The struct definitions themselves live in warehouse.h. */
/* Allocate an art collection holding a copy of `name` plus size and price. */
struct art_collection* createArtCollection(char* name, int size, int price);
/* Allocate a warehouse with the given id/size and an empty collection slot. */
struct warehouse* createWarehouse(int id, int size);
/* Store `temp_art_collection` in `temp_warehouse` and return the warehouse. */
struct warehouse* addToWarehouse(struct warehouse* temp_warehouse, struct art_collection* temp_art_collection);
/* Wrap a warehouse in a list node; `type` ("public"/"private") and `size`
 * are packed into the node's meta_info field. */
struct warehouse_list* createWarehouseList (struct warehouse* warehouse, char* type, int size);
/* Create a segregated-fit size-class head node pointing at `warehouse_list_head`. */
struct warehouse_sf_list* createWarehouseSfList(int class_size, struct warehouse_list* warehouse_list_head);
#endif /* LINKED_LIST_H */
|
df918cbea68bc642e789da32e39f73fc07663f70
|
[
"C",
"Makefile"
] | 4 |
C
|
kushaldelhiwala/CSE-320-HW-2
|
fbb5b0d44a5b5bdbb60087603f72b0a893cec524
|
e409944e66c6b7fcda59563285a0ef7bc2185238
|
refs/heads/main
|
<repo_name>BasantHasaan/graghqlProductsapi<file_sep>/README.md
This is a product and user API built with GraphQL and Apollo Server.
To run it locally: `npm i` to install node_modules,
edit the database connection settings in config/config.json,
then run `npm start`.
To run it with Docker: rename env.example to .env and edit the configuration in it,
then run `docker-compose up` to start the services defined in docker-compose.yaml.
<file_sep>/models/product.js
module.exports = (sequelize, DataTypes) => {
const Product = sequelize.define(
'Product',
{
name: {
type: DataTypes.STRING,
allowNull: false,
},
seller: {
type: DataTypes.STRING,
allowNull: false,
},
price: {
type: DataTypes.FLOAT,
allowNull: false,
},
},
{},
);
Product.associate = (models) => {
Product.belongsTo(models.User, { foreignKey: 'userId' });
};
return Product;
};
<file_sep>/middleware/checkAuth.js
/* eslint-disable no-return-assign */
const jwt = require('jsonwebtoken');
// const config = process.env;
module.exports = (req) => {
const authorize = req.req.headers.authorization;
if (!authorize) {
return (!req.isAuth);
}
const token = authorize.split(' ')[1];
if (!token) {
return req.isAuth = false;
}
try {
const decoded = jwt.verify(token, 'myToken');
return (req.isAuth = true,
req.userId = decoded.userId);
} catch (err) {
return req.isAuth = false;
}
};
<file_sep>/graphql/resolvers/user.js
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
const verifyToken = require('../../middleware/checkAuth');
// GraphQL resolvers for the User/Product schema.
// `models` (Sequelize models) and `req` (auth context) arrive via the Apollo context.
const resolvers = {
Query: {
// Fetch a user by primary key; resolves to null when no row matches.
async user(root, { id }, { models }) {
return models.User.findByPk(id);
},
// List all products; optional orderBy.price ('asc'/'desc') sorts by price.
async allProducts(root, args, { models }) {
return models.Product.findAll(
args.orderBy
? {
order: [['price', args.orderBy.price]],
}
: {},
);
},
// Fetch a single product by primary key.
async product(root, { id }, { models }) {
return models.Product.findByPk(id);
},
// Authenticate by email/password; throws on unknown user or bad password.
// Issues a 1-hour JWT signed with the same hard-coded secret checkAuth verifies.
async login(root, { email, password }, { models }) {
const userExist = await models.User.findOne({ where: { email } });
if (!userExist) {
throw new Error('user not found');
}
const isEqual = await bcrypt.compare(password, userExist.password);
if (!isEqual) {
throw new Error('password inCorrect');
}
const token = jwt.sign({ userId: userExist.id }, 'myToken', {
expiresIn: '1h',
});
return { userId: userExist.id, token, tokenExpiration: 1 };
},
},
Mutation: {
// Create a user, storing a bcrypt hash (cost 10) instead of the raw password.
async createUser(root, { name, email, password }, { models }) {
return models.User.create({
name,
email,
password: await bcrypt.hash(password, 10),
});
},
// Create a product owned by the authenticated user.
// NOTE(review): the userId comes from the verified token (req.userId set by
// verifyToken), not from the GraphQL argument of the same name.
async createProduct(root, { name, price, seller }, { models, req }) {
if (!verifyToken(req)) {
throw new Error('not Auth');
}
return models.Product.create({
userId: req.userId,
name,
price,
seller,
});
},
},
// Field resolver: a user's products via the Sequelize association getter.
User: {
async products(user) {
return user.getProducts();
},
},
// Field resolver: a product's owning user via the association getter.
Product: {
async user(product) {
return product.getUser();
},
},
};
module.exports = resolvers;
<file_sep>/graphql/schema/index.js
const { gql } = require('apollo-server');
// GraphQL schema (SDL) for the users/products API.
// Kept as a single gql template literal; the string content is runtime data
// consumed by Apollo Server, so it is reproduced verbatim below.
// NOTE(review): createProduct declares a userId argument, but the resolver
// takes the id from the verified JWT instead — confirm whether the argument
// should be dropped from the schema.
const typeDefs = gql`
type User {
id: Int!
name: String!
email: String!
products: [Product!]!
}
type Product {
id: Int!
name: String!
price: Float!
seller: String!
user: User!
}
type AuthData {
id: ID!
token: String!
tokenExpiration : Int!
}
type Query {
user(id: Int!): User
allProducts(orderBy: LinkOrderByInput): [Product!]!
product(id: Int!): Product
login(email: String!, password: String!): AuthData!
}
type Mutation {
createUser(name: String!, email: String!, password: String!): User!
createProduct(
userId: Int!
name: String!
price: Float!
seller: String!
): Product!
}
input LinkOrderByInput {
price: Sort
}
enum Sort {
asc
desc
}
`;
module.exports = typeDefs;
|
340b51410e8ce890a4024cbbe252606f943e4754
|
[
"Markdown",
"JavaScript"
] | 5 |
Markdown
|
BasantHasaan/graghqlProductsapi
|
a30bb5c0d610708567bfc74a6add8c6ec2a801e1
|
f47a72377b5fab393cd1d7b271d281bd36d0fba4
|
refs/heads/master
|
<file_sep>
import program
import unittest2


class TestProgram(unittest2.TestCase):
    """Tests for program.twoSum."""

    def test_case_1(self):
        # twoSum should return the pair of values from the array that sum
        # to the target (11 + -1 == 10).
        output = program.twoSum([3, 5, -4, 8, 11, 1, -1, 6], 10)
        # assertEqual/assertIn give informative failure messages, unlike the
        # original assertTrue(...) wrappers which only report "False is not true".
        self.assertEqual(len(output), 2)
        self.assertIn(11, output)
        self.assertIn(-1, output)
|
11d35c6cc5c5157b5f94bf205720353696bbeba1
|
[
"Python"
] | 1 |
Python
|
pallavipa/Daily_Coding
|
70d428dc8591c4048b9f95cb52c01e90b66742e5
|
2ffea80d24f25ad507ed06f8cbb1ba9e01685c60
|
refs/heads/master
|
<repo_name>jgrau/MODx-CMS<file_sep>/install/setup.updates.sql
# MODx Database Script for New/Upgrade Installations
#
# Each sql command is separated by double lines.
# {PREFIX} is substituted with the configured table prefix at install time.
#
# Assign built-in snippets to their manager categories in `site_snippets`
# (2 = membership, 3 = content listing, 4 = navigation, 5 = forms, 10 = search).
#
UPDATE `{PREFIX}site_snippets` SET `category` = '2' WHERE `name` IN ('MemberCheck', 'Personalize', 'WebChangePwd', 'WebLogin', 'WebSignup');
UPDATE `{PREFIX}site_snippets` SET `category` = '3' WHERE `name` IN ('Ditto', 'Jot', 'ListIndexer', 'Reflect');
UPDATE `{PREFIX}site_snippets` SET `category` = '4' WHERE `name` IN ('Breadcrumbs','FirstChildRedirect','UltimateParent','Wayfinder');
UPDATE `{PREFIX}site_snippets` SET `category` = '5' WHERE `name` IN ('eForm');
UPDATE `{PREFIX}site_snippets` SET `category` = '10' WHERE `name` IN ('AjaxSearch');
#
# Assign built-in plugins to their categories in `site_plugins`
#
UPDATE `{PREFIX}site_plugins` SET `category` = '10' WHERE `name` IN ('Search Highlighting');
UPDATE `{PREFIX}site_plugins` SET `category` = '6' WHERE `name` IN ('Forgot Manager Login','Inherit Parent Template','TinyMCE','QM+','Transalias','Image TV Preview');
#
# Assign built-in template variables to their categories in `site_tmplvars`
#
UPDATE `{PREFIX}site_tmplvars` SET `category` = '9' WHERE `name` IN ('blogContent','loginName');
UPDATE `{PREFIX}site_tmplvars` SET `category` = '3' WHERE `name` IN ('documentTags');
<file_sep>/manager/includes/mutate_settings.ajax.php
<?php
/**
 * mutate_settings.ajax.php
 *
 * AJAX endpoint: echoes a single language string looked up by key from
 * manager/includes/lang/<lang>.inc.php.
 * Expects POST fields: action ('get'), lang, key.
 */
require_once(dirname(__FILE__) . '/protect.inc.php');
// Whitelist-sanitize POST input before it is used to build a file path.
// isset() guards avoid PHP notices when a field is missing from the request.
// NOTE(review): the original whitelist permits '.' and '/' (so '../' survives);
// kept for compatibility — the .inc.php suffix plus file_exists() limits exposure.
$action = isset($_POST['action']) ? preg_replace('/[^A-Za-z0-9_\-\.\/]/', '', $_POST['action']) : '';
$lang = isset($_POST['lang']) ? preg_replace('/[^A-Za-z0-9_\-\.\/]/', '', $_POST['lang']) : '';
$key = isset($_POST['key']) ? preg_replace('/[^A-Za-z0-9_\-\.\/]/', '', $_POST['key']) : '';
$str = '';
if($action == 'get') {
$langfile = dirname(__FILE__) . '/lang/'.$lang.'.inc.php';
if(file_exists($langfile)) {
$str = getLangStringFromFile($langfile, $key);
}
}
echo $str;
/**
 * Load a language file and return the string stored under $key.
 * Returns '' when the key is not defined (the original emitted an
 * undefined-index notice and returned null in that case).
 */
function getLangStringFromFile($file, $key) {
include($file);
return isset($_lang[$key]) ? $_lang[$key] : '';
}<file_sep>/install/connection.collation.php
<?php
// AJAX endpoint for the installer: renders a <select> of the MySQL server's
// available collations, pre-selecting the one posted by the form.
// Expects POST fields: host, uid, pwd, database_collation.
// NOTE(review): POST values are used unsanitized and the long-deprecated
// mysql_* API is used throughout — flagged here, not changed.
$host = $_POST['host'];
$uid = $_POST['uid'];
$pwd = $_POST['pwd'];
$database_collation = $_POST['database_collation'];
// Fallback markup: a single-option select echoing the posted collation,
// used when the server cannot be reached or returns no collations.
$output = '<select id="database_collation" name="database_collation">
<option value="'.$database_collation.'" selected >'.$database_collation.'</option></select>';
if ($conn = @ mysql_connect($host, $uid, $pwd)) {
// get collation
$getCol = mysql_query("SHOW COLLATION");
if (@mysql_num_rows($getCol) > 0) {
$output = '<select id="database_collation" name="database_collation">';
while ($row = mysql_fetch_row($getCol)) {
// $row[0] is the collation name; re-select the previously chosen one.
$selected = ( $row[0]==$database_collation ? ' selected' : '' );
$output .= '<option value="'.$row[0].'"'.$selected.'>'.$row[0].'</option>';
}
$output .= '</select>';
}
}
echo $output;
?><file_sep>/manager/includes/lang/country/japanese-utf8_country.inc.php
<?php
/**
* Country List Language File
* Version: 1.0
* Date: 31/10/2006
* Translated: 8/12/2006
* Array keys refer to the ID number that is saved in the database
*
*/
$_country_lang["1"] = 'アフガニスタン'; // Afghanistan
$_country_lang["2"] = 'アルバニア'; // Albania
$_country_lang["3"] = 'アルジェリア'; // Algeria
$_country_lang["4"] = '米領サモア'; // American Samoa
$_country_lang["5"] = 'アンドラ'; // Andorra
$_country_lang["6"] = 'アンゴラ'; // Angola
$_country_lang["7"] = 'アンギラ'; // Anguilla
$_country_lang["8"] = '南極'; // Antarctica
$_country_lang["9"] = 'アンティグア・バーブーダ'; // Antigua and Barbuda
$_country_lang["10"] = 'アルゼンチン'; // Argentina
$_country_lang["11"] = 'アルメニア'; // Armenia
$_country_lang["12"] = 'アルバ'; // Aruba
$_country_lang["13"] = 'オーストラリア'; // Australia
$_country_lang["14"] = 'オーストリア'; // Austria
$_country_lang["15"] = 'アゼルバイジャン'; // Azerbaijan
$_country_lang["16"] = 'バハマ'; // Bahamas
$_country_lang["17"] = 'バーレーン'; // Bahrain
$_country_lang["18"] = 'バングラディッシュ'; // Bangladesh
$_country_lang["19"] = 'バルバドス'; // Barbados
$_country_lang["20"] = '白ロシア'; // Belarus
$_country_lang["21"] = 'ベルギー'; // Belgium
$_country_lang["22"] = 'ベリーズ'; // Belize
$_country_lang["23"] = 'ベニン'; // Benin
$_country_lang["24"] = 'バミューダ'; // Bermuda
$_country_lang["25"] = 'ブータン'; // Bhutan
$_country_lang["26"] = 'ボリビア'; // Bolivia
$_country_lang["27"] = 'ボスニア・ヘルツェコビナ'; // Bosnia and Herzegowina
$_country_lang["28"] = 'ボツワナ'; // Botswana
$_country_lang["29"] = 'ブーベ島'; // Bouvet Island
$_country_lang["30"] = 'ブラジル'; // Brazil
$_country_lang["31"] = '英領インド洋地域'; // British Indian Ocean Territory
$_country_lang["32"] = 'ブルネイ・ダルサラーム'; // Brunei Darussalam
$_country_lang["33"] = 'ブルガリア'; // Bulgaria
$_country_lang["34"] = 'ブルキナ・ファッソ'; // Burkina Faso
$_country_lang["35"] = 'ブルンジ'; // Burundi
$_country_lang["36"] = 'カンボジア'; // Cambodia
$_country_lang["37"] = 'カメルーン'; // Cameroon
$_country_lang["38"] = 'カナダ'; // Canada
$_country_lang["39"] = 'カーボベルデ'; // Cape Verde
$_country_lang["40"] = 'ケイマン諸島'; // Cayman Islands
$_country_lang["41"] = '中央アフリカ共和国'; // Central African Republic
$_country_lang["42"] = 'チャド'; // Chad
$_country_lang["43"] = 'チリ'; // Chile
$_country_lang["44"] = '中国'; // China
$_country_lang["45"] = 'クリスマス島'; // Christmas Island
$_country_lang["46"] = 'ココス諸島'; // Cocos (Keeling) Islands
$_country_lang["47"] = 'コロンビア'; // Colombia
$_country_lang["48"] = 'コモロ'; // Comoros
$_country_lang["49"] = 'コンゴ共和国'; // Congo
$_country_lang["50"] = 'クック諸島'; // Cook Islands
$_country_lang["51"] = 'コスタリカ'; // Costa Rica
$_country_lang["52"] = 'コートジボワール'; // Cote D'Ivoire
$_country_lang["53"] = 'クロアチア'; // Croatia
$_country_lang["54"] = 'キューバ'; // Cuba
$_country_lang["55"] = 'キプロス'; // Cyprus
$_country_lang["56"] = 'チェコ'; // Czech Republic
$_country_lang["57"] = 'デンマーク'; // Denmark
$_country_lang["58"] = 'ジブチ'; // Djibouti
$_country_lang["59"] = 'ドミニカ国'; // Dominica
$_country_lang["60"] = 'ドミニカ共和国'; // Dominican Republic
$_country_lang["61"] = '東チモール'; // East Timor
$_country_lang["62"] = 'エクアドル'; // Ecuador
$_country_lang["63"] = 'エジプト'; // Egypt
$_country_lang["64"] = 'エルサルバドル'; // El Salvador
$_country_lang["65"] = '赤道ギニア'; // Equatorial Guinea
$_country_lang["66"] = 'エリトリア'; // Eritrea
$_country_lang["67"] = 'エストニア'; // Estonia
$_country_lang["68"] = 'エチオピア'; // Ethiopia
$_country_lang["69"] = 'フォークランド諸島'; // Falkland Islands (Malvinas)
$_country_lang["70"] = 'フェロー諸島'; // Faroe Islands
$_country_lang["71"] = 'フィジー'; // Fiji
$_country_lang["72"] = 'フィンランド'; // Finland
$_country_lang["73"] = 'フランス'; // France
$_country_lang["74"] = 'フランス(本国)'; // France, Metropolitan
$_country_lang["75"] = '仏領ギニア'; // French Guiana
$_country_lang["76"] = '仏領ポリネシア'; // French Polynesia
$_country_lang["77"] = '仏領南方地域'; // French Southern Territories
$_country_lang["78"] = 'ガボン'; // Gabon
$_country_lang["79"] = 'ガンビア'; // Gambia
$_country_lang["80"] = 'グルジア'; // Georgia
$_country_lang["81"] = 'ドイツ'; // Germany
$_country_lang["82"] = 'ガーナ'; // Ghana
$_country_lang["83"] = 'ジブラルタル'; // Gibraltar
$_country_lang["84"] = 'ギリシア'; // Greece
$_country_lang["85"] = 'グリーンランド'; // Greenland
$_country_lang["86"] = 'グレナダ'; // Grenada
$_country_lang["87"] = 'グアドループ'; // Guadeloupe
$_country_lang["88"] = 'グアム'; // Guam
$_country_lang["89"] = 'グァテマラ'; // Guatemala
$_country_lang["90"] = 'ギニア'; // Guinea
$_country_lang["91"] = 'ギニアビサウ'; // Guinea-bissau
$_country_lang["92"] = 'ギヤナ'; // Guyana
$_country_lang["93"] = 'ハイチ'; // Haiti
$_country_lang["94"] = 'ハード島とマクドナルド諸島'; // Heard and Mc Donald Islands
$_country_lang["95"] = 'ホンジュラス'; // Honduras
$_country_lang["96"] = '香港'; // Hong Kong
$_country_lang["97"] = 'ハンガリ'; // Hungary
$_country_lang["98"] = 'アイスランド'; // Iceland
$_country_lang["99"] = 'インド'; // India
$_country_lang["100"] = 'インドネシア'; // Indonesia
$_country_lang["101"] = 'イラン・イスラム共和国'; // Iran (Islamic Republic of)
$_country_lang["102"] = 'イラク'; // Iraq
$_country_lang["103"] = 'アイルランド'; // Ireland
$_country_lang["104"] = 'イスラエル'; // Israel
$_country_lang["105"] = 'イタリア'; // Italy
$_country_lang["106"] = 'ジャマイカ'; // Jamaica
$_country_lang["107"] = '日本'; // Japan
$_country_lang["108"] = 'ヨルダン'; // Jordan
$_country_lang["109"] = 'カザフスタン'; // Kazakhstan
$_country_lang["110"] = 'ケニア'; // Kenya
$_country_lang["111"] = 'キリバス'; // Kiribati
$_country_lang["112"] = '北朝鮮'; // Korea, Democratic People's Republic of
$_country_lang["113"] = '韓国'; // Korea, Republic of
$_country_lang["114"] = 'クェート'; // Kuwait
$_country_lang["115"] = 'キルギスタン'; // Kyrgyzstan
$_country_lang["116"] = 'ラオス'; // Lao People's Democratic Republic
$_country_lang["117"] = 'ラトビア'; // Latvia
$_country_lang["118"] = 'レバノン'; // Lebanon
$_country_lang["119"] = 'レソト'; // Lesotho
$_country_lang["120"] = 'リベリア'; // Liberia
$_country_lang["121"] = 'リビア'; // Libyan Arab Jamahiriya
$_country_lang["122"] = 'リヒテンシュタイン'; // Liechtenstein
$_country_lang["123"] = 'リトアニア'; // Lithuania
$_country_lang["124"] = 'ルクセンブルグ'; // Luxembourg
$_country_lang["125"] = 'マカオ'; // Macau
$_country_lang["126"] = 'マケドニア'; // Macedonia, The Former Yugoslav Republic of
$_country_lang["127"] = 'マダガスカル'; // Madagascar
$_country_lang["128"] = 'マラウイ'; // Malawi
$_country_lang["129"] = 'マレーシア'; // Malaysia
$_country_lang["130"] = 'モルディブ'; // Maldives
$_country_lang["131"] = 'マリ'; // Mali
$_country_lang["132"] = 'マルタ'; // Malta
$_country_lang["133"] = 'マーシャル諸島'; // Marshall Islands
$_country_lang["134"] = 'マルティニーク'; // Martinique
$_country_lang["135"] = 'モーリタニア'; // Mauritania
$_country_lang["136"] = 'モーリシャス'; // Mauritius
$_country_lang["137"] = 'マヨット'; // Mayotte
$_country_lang["138"] = 'メキシコ'; // Mexico
$_country_lang["139"] = 'ミクロネシア連邦'; // Micronesia, Federated States of
$_country_lang["140"] = 'モルドバ共和国'; // Moldova, Republic of
$_country_lang["141"] = 'モナコ'; // Monaco
$_country_lang["142"] = 'モンゴル'; // Mongolia
$_country_lang["241"] = 'モンテネグロ'; //Montenegro
$_country_lang["143"] = 'モントセラト'; // Montserrat
$_country_lang["144"] = 'モロッコ'; // Morocco
$_country_lang["145"] = 'モザンビーク'; // Mozambique
$_country_lang["146"] = 'ミャンマー'; // Myanmar
$_country_lang["147"] = 'ナミビア'; // Namibia
$_country_lang["148"] = 'ナウル'; // Nauru
$_country_lang["149"] = 'ネパール'; // Nepal
$_country_lang["150"] = 'オランダ'; // Netherlands
$_country_lang["151"] = '蘭領アンティル'; // Netherlands Antilles
$_country_lang["152"] = 'ニューカレドニア'; // New Caledonia
$_country_lang["153"] = 'ニュージーランド'; // New Zealand
$_country_lang["154"] = 'ニカラグア'; // Nicaragua
$_country_lang["155"] = 'ニジェール'; // Niger
$_country_lang["156"] = 'ナイジェリア'; // Nigeria
$_country_lang["157"] = 'ニウエ'; // Niue
$_country_lang["158"] = 'ノーフォーク島'; // Norfolk Island
$_country_lang["159"] = '北マリアナ諸島'; // Northern Mariana Islands
$_country_lang["160"] = 'ノルウェー'; // Norway
$_country_lang["161"] = 'オマーン'; // Oman
$_country_lang["162"] = 'パキスタン'; // Pakistan
$_country_lang["163"] = 'パラオ'; // Palau
$_country_lang["164"] = 'パナマ'; // Panama
$_country_lang["165"] = 'パプア・ニューギニア'; // Papua New Guinea
$_country_lang["166"] = 'パラグアイ'; // Paraguay
$_country_lang["167"] = 'ペルー'; // Peru
$_country_lang["168"] = 'フィリピン'; // Philippines
$_country_lang["169"] = 'ピトケアン諸島'; // Pitcairn
$_country_lang["170"] = 'ポーランド'; // Poland
$_country_lang["171"] = 'ポルトガル'; // Portugal
$_country_lang["172"] = 'プエルトリコ'; // Puerto Rico
$_country_lang["173"] = 'カタール'; // Qatar
$_country_lang["174"] = 'レユニオン'; // Reunion
$_country_lang["175"] = 'ルーマニア'; // Romania
$_country_lang["176"] = 'ロシア連邦'; // Russian Federation
$_country_lang["177"] = 'ルワンダ'; // Rwanda
$_country_lang["178"] = 'セントキッツ・ネイビス'; // Saint Kitts and Nevis
$_country_lang["179"] = 'セントルシア'; // Saint Lucia
$_country_lang["180"] = 'セントビンセントおよびグレナディーン諸島'; // Saint Vincent and the Grenadines
$_country_lang["181"] = 'サモア'; // Samoa
$_country_lang["182"] = 'サンマリノ'; // San Marino
$_country_lang["183"] = 'サントメ・プリンシペ'; // Sao Tome and Principe
$_country_lang["184"] = 'サウジアラビア'; // Saudi Arabia
$_country_lang["185"] = 'セネガル'; // Senegal
$_country_lang["240"] = 'セルビア'; //Serbia
$_country_lang["186"] = 'セイシェル'; // Seychelles
$_country_lang["187"] = 'シエラレオネ'; // Sierra Leone
$_country_lang["188"] = 'シンガポール'; // Singapore
$_country_lang["189"] = 'スロバキア'; // Slovakia (Slovak Republic)
$_country_lang["190"] = 'スロベニア'; // Slovenia
$_country_lang["191"] = 'ソロモン諸島'; // Solomon Islands
$_country_lang["192"] = 'ソマリア'; // Somalia
$_country_lang["193"] = '南アフリカ'; // South Africa
$_country_lang["194"] = 'サウスジョージア・サウスサンドウィッチ諸島'; // South Georgia and the South Sandwich Islands
$_country_lang["195"] = 'スペイン'; // Spain
$_country_lang["196"] = 'スリランカ'; // Sri Lanka
$_country_lang["197"] = 'セントヘレナ'; // St. Helena
$_country_lang["198"] = 'サンピエール・ミクロン'; // St. Pierre and Miquelon
$_country_lang["199"] = 'スーダン'; // Sudan
$_country_lang["200"] = 'スリナム'; // Suriname
$_country_lang["201"] = 'スヴァールバル・ヤンマイエン諸島'; // Svalbard and Jan Mayen Islands
$_country_lang["202"] = 'スワジランド'; // Swaziland
$_country_lang["203"] = 'スェーデン'; // Sweden
$_country_lang["204"] = 'スイス'; // Switzerland
$_country_lang["205"] = 'シリア'; // Syrian Arab Republic
$_country_lang["206"] = '台湾'; // Taiwan
$_country_lang["207"] = 'タジキスタン'; // Tajikistan
$_country_lang["208"] = 'タンザニア'; // Tanzania, United Republic of
$_country_lang["209"] = 'タイ'; // Thailand
$_country_lang["210"] = 'トーゴ'; // Togo
$_country_lang["211"] = 'トケラウ'; // Tokelau
$_country_lang["212"] = 'トンガ'; // Tonga
$_country_lang["213"] = 'トリニダード・トバゴ'; // Trinidad and Tobago
$_country_lang["214"] = 'チュニジア'; // Tunisia
$_country_lang["215"] = 'トルコ'; // Turkey
$_country_lang["216"] = 'トルクメニスタン'; // Turkmenistan
$_country_lang["217"] = 'タークス・ケイコス諸島'; // Turks and Caicos Islands
$_country_lang["218"] = 'ツバル'; // Tuvalu
$_country_lang["219"] = 'ウガンダ'; // Uganda
$_country_lang["220"] = 'ウクライナ'; // Ukraine
$_country_lang["221"] = 'アラブ首長国連邦'; // United Arab Emirates
$_country_lang["222"] = 'イギリス'; // United Kingdom
$_country_lang["223"] = 'アメリカ'; // United States
$_country_lang["224"] = '米領太平洋諸島'; // United States Minor Outlying Islands
$_country_lang["225"] = 'ウルグアイ'; // Uruguay
$_country_lang["226"] = 'ウズベキスタン'; // Uzbekistan
$_country_lang["227"] = 'バヌアツ'; // Vanuatu
$_country_lang["228"] = 'ヴァチカン'; // Vatican City State (Holy See)
$_country_lang["229"] = 'ベネズエラ'; // Venezuela
$_country_lang["230"] = 'ベトナム'; // Viet Nam
$_country_lang["231"] = '英領バージン諸島'; // Virgin Islands (British)
$_country_lang["232"] = '米領バージン諸島'; // Virgin Islands (U.S.)
$_country_lang["233"] = 'ウォリス・フツナ諸島'; // Wallis and Futuna Islands
$_country_lang["234"] = '西サハラ'; // Western Sahara
$_country_lang["235"] = 'イエメン'; // Yemen
$_country_lang["236"] = 'セルビア・モンテネグロ'; // DEPRECATED: kept for backwards compatibility, Serbia and Montenegro
$_country_lang["237"] = 'コンゴ民主共和国'; // Congo, Democratic Republic of the
$_country_lang["238"] = 'ザンビア'; // Zambia
$_country_lang["239"] = 'ジンバブエ'; // Zimbabwe
?><file_sep>/install/lang.php
<?php
/**
 * Multilanguage functions for MODx Installer
 *
 * @author davaeron
 * @package MODx
 * @version 1.0
 *
 * Filename: /install/lang.php
 */
$_lang = array ();
#default fallback language file - english
require_once("lang/english/english.inc.php");
# Pick the installer language from POST (preferred) or GET.
$install_language = "english";
if (isset($_POST['language'])) {
$install_language = $_POST['language'];
} else {
if (isset($_GET['language']))
$install_language = $_GET['language'];
}
# SECURITY FIX: the request value is used below to build include/file paths,
# so strip everything except letters, digits, underscore and dash to block
# path traversal (e.g. "../../..").
$install_language = preg_replace('/[^A-Za-z0-9_\-]/', '', $install_language);
# Same selection and sanitization for the manager language.
$manager_language = "english";
if (isset($_POST['managerlanguage'])) {
$manager_language = $_POST['managerlanguage'];
} else {
if (isset($_GET['managerlanguage']))
$manager_language = $_GET['managerlanguage'];
}
$manager_language = preg_replace('/[^A-Za-z0-9_\-]/', '', $manager_language);
# load language file (english is already loaded as the fallback above)
if($install_language!="english" && file_exists("lang/".$install_language."/".$install_language.".inc.php")) {
include_once "lang/".$install_language."/".$install_language.".inc.php";
}
/**
 * Resolve a language-specific image path, falling back to the english
 * version when the selected language does not provide the image.
 */
function include_image ($image) {
global $install_language;
if($install_language!="english" && file_exists("lang/" . $install_language . "/images/" . $image)) {
$result = "lang/" . $install_language . "/images/" . $image;
} else {
$result = "lang/english/images/" . $image;
}
return $result;
}
?><file_sep>/manager/includes/version.inc.php
<?php
// MODx version information: globals used to build the application name string.
$version = '1.0.0'; /* Current version */
$code_name = 'rev 5601'; /* SVN version number */
/* Human-readable application name, e.g. "MODx 1.0.0 (rev 5601)";
 * the parenthesised part is omitted when $code_name is empty. */
$full_appname = 'MODx'.' '.$version.($code_name ? " ($code_name)":"");
$small_version = '0';
$patch_level = '0';<file_sep>/install/connection.js
// MooTools installer wiring: on DOM ready, hook the two connection-test
// buttons and prepare the slide-in panels for collation / upgrade hints.
window.addEvent('domready', function(){
// get collation from the database server
// Posts the connection details to connection.collation.php and replaces
// the #collation element with the returned <select>, then runs testServer.
$('servertest').addEvent('click', function(e) {
e = new Event(e).stop();
var url = "connection.collation.php";
host = $('databasehost').value;
uid = $('databaseloginname').value;
// NOTE(review): '<PASSWORD>' below looks like a redaction placeholder, not a
// real element id — presumably 'databaseloginpassword'; confirm against the
// install form markup before shipping.
pwd = $('databaselogin<PASSWORD>').value;
database_collation = $('database_collation').value;
database_connection_method = $('database_connection_method').value;
var pars = Object.toQueryString({
q: url,
host: host,
uid: uid,
pwd: pwd,
database_collation: database_collation,
database_connection_method: database_connection_method,
language: language
});
new Ajax(url, { postBody: pars, update: $('collation'), onComplete: testServer } ).request();
});
// database test
// Posts the full database settings to connection.databasetest.php and shows
// the result in #databasestatus; setDefaults runs afterwards.
$('databasetest').addEvent('click', function(e) {
e = new Event(e).stop();
var url = "connection.databasetest.php";
host = $('databasehost').value;
uid = $('databaseloginname').value;
// NOTE(review): another redaction placeholder — same password field as above.
pwd = $('<PASSWORD>').value;
database_name = $('database_name').value;
tableprefix = $('tableprefix').value;
database_collation = $('database_collation').value;
database_connection_method = $('database_connection_method').value;
var pars = Object.toQueryString({
q: url,
host: host,
uid: uid,
pwd: pwd,
database_name: database_name,
tableprefix: tableprefix,
database_collation: database_collation,
database_connection_method: database_connection_method,
language: language,
installMode: installMode
});
new Ajax(url, { postBody: pars, update: $('databasestatus'), onComplete: setDefaults } ).request();
});
// Collation panel: hidden slide-in, highlighted yellow when revealed.
Slider1 = new Fx.Slide('setCollation', {duration:477});//transition:Fx.Sine.easeOut,
Slider1.hide();
$('setCollation').style.backgroundColor = '#ffff00';
$('setCollation').style.display = 'block';
// Optional upgrade-hint panel (#AUH) gets the same treatment when present.
if(document.getElementById('AUH')) {
Slider2 = new Fx.Slide('AUH', {duration:477});//transition:Fx.Sine.easeOut,
Slider2.hide();
$('AUH').style.display = 'block';
$('AUH').style.backgroundColor = '#ffff00';
}
});
// Runs the MySQL server test (connection.servertest.php) after the collation
// request completes; the response replaces #serverstatus, then setColor() runs.
function testServer(){
// get the server test status as soon as collation received
var url = "connection.servertest.php";
// NOTE(review): host/uid/pwd are assigned without `var`, so they are globals
// shared with the click handlers in the domready block - kept as-is.
host = $('databasehost').value;
uid = $('databaseloginname').value;
pwd = $('databaseloginpassword').value;
// Serialize credentials as POST parameters. `language` is presumably a global
// set by the install page - TODO confirm.
var pars = Object.toQueryString({
q: url,
host: host,
uid: uid,
pwd: pwd,
language: language
});
// Post to the server test script and drop its HTML into #serverstatus.
new Ajax(url, { postBody: pars, update: $('serverstatus'), onComplete: setColor } ).request();
}
// After the database-test response lands in #databasestatus: when it contains
// the `passed_db` marker (a global set elsewhere - presumably a localized
// "passed" string; verify against the install page) and the optional #AUH
// block exists, slide it open, fade it in, clear its highlight and scroll to
// the manager-language selector.
function setDefaults(){
if($('databasestatus').innerHTML.indexOf(passed_db) >= 0 && document.getElementById('AUH')) {
window.Slider2.slideIn();
// Fade the AUH mask from transparent to opaque over ~1s.
var Slider2FX = new Fx.Styles('AUHMask', {duration: 997,transition: Fx.Transitions.linear});
Slider2FX.start({'opacity':[0,1]});
// Reset the yellow highlight applied on page load back to white.
window.setTimeout("$('AUH').style.backgroundColor = '#ffffff';", 1000);
Slider2Scroll = new Fx.Scroll(window);
Slider2Scroll.toElement('managerlanguage_select');
}
}
// After the server-test response lands in #serverstatus: when it contains the
// `passed` marker (a global set elsewhere - TODO confirm), highlight the
// collation select, slide the collation panel open, scroll to the database
// status area and focus the database-name input.
function setColor(){
var col = $('database_collation');
// ss/ssv are assigned without `var` (implicit globals) - kept as-is.
ss = document.getElementById('serverstatus');
ssv = ss.innerHTML;
if (ssv.indexOf(passed) >=0) {
// Mark the collation selector green/bold to signal the server test passed.
col.setStyle('background-color', '#9CCD00');
// col.setStyle('color', '#0000CD');
col.setStyle('font-weight','bold');
window.Slider1.slideIn(); //toggle the slider up and down.
// Fade the collation mask in over ~1s.
var Slider1FX = new Fx.Styles('collationMask', {duration: 997,transition: Fx.Transitions.linear});
Slider1FX.start({'opacity':[0,1]});
// Reset the yellow highlight applied on page load back to white.
window.setTimeout("$('setCollation').style.backgroundColor = '#ffffff';", 1000);
Slider1Scroll = new Fx.Scroll(window);
Slider1Scroll.toElement('databasestatus');
$('database_name').focus();
}
}
<?php
/**
* Country List Language File
* Version: 1.0
* Date: 13/12/2006
* Translation: Mohsen (MotSmart ~ www.modxcms.ir)
* Array keys refer to the ID number that is saved in the database
*
*/
$_country_lang["1"] = 'افغانستان'; // Afghanistan
$_country_lang["2"] = 'آلبانی'; // Albania
$_country_lang["3"] = 'الجزیره'; // Algeria
$_country_lang["4"] = 'ساموئای امریکا'; // American Samoa
$_country_lang["5"] = 'آندورا'; // Andorra
$_country_lang["6"] = 'آنگولا'; // Angola
$_country_lang["7"] = 'آنگولیا'; // Anguilla
$_country_lang["8"] = 'آنتاراکتیا'; // Antarctica
$_country_lang["9"] = 'آنتیکوآ و باربودا'; // Antigua and Barbuda
$_country_lang["10"] = 'آرژانتین'; // Argentina
$_country_lang["11"] = 'ارمنستان'; // Armenia
$_country_lang["12"] = 'آروبا'; // Aruba
$_country_lang["13"] = 'استرالیا'; // Australia
$_country_lang["14"] = 'اتریش'; // Austria
$_country_lang["15"] = 'آذربایجان'; // Azerbaijan
$_country_lang["16"] = 'باهاماس'; // Bahamas
$_country_lang["17"] = 'بحرین'; // Bahrain
$_country_lang["18"] = 'بنگلادش'; // Bangladesh
$_country_lang["19"] = 'بابادوس'; // Barbados
$_country_lang["20"] = 'بلاروس'; // Belarus
$_country_lang["21"] = 'بلژیک'; // Belgium
$_country_lang["22"] = 'بلیزه'; // Belize
$_country_lang["23"] = 'بنین'; // Benin
$_country_lang["24"] = 'برمودا'; // Bermuda
$_country_lang["25"] = 'بوتان'; // Bhutan
$_country_lang["26"] = 'بولیوی'; // Bolivia
$_country_lang["27"] = 'بوسنی و هرزگوین'; // Bosnia and Herzegowina
$_country_lang["28"] = 'بوتسوانا'; // Botswana
$_country_lang["29"] = 'جزایر بوئووت'; // Bouvet Island
$_country_lang["30"] = 'برزیل'; // Brazil
$_country_lang["31"] = 'مستحفضات دریای هند و انگلیس'; // British Indian Ocean Territory
$_country_lang["32"] = 'بورونئی'; // Brunei Darussalam
$_country_lang["33"] = 'بلغارستان'; // Bulgaria
$_country_lang["34"] = 'بورکینا فاسو'; // Burkina Faso
$_country_lang["35"] = 'بروندی'; // Burundi
$_country_lang["36"] = 'کامبوجیه'; // Cambodia
$_country_lang["37"] = 'کامرون'; // Cameroon
$_country_lang["38"] = 'کانادا'; // Canada
$_country_lang["39"] = 'کیپ ورده'; // Cape Verde
$_country_lang["40"] = 'جزایر کایمان'; // Cayman Islands
$_country_lang["41"] = 'جمهوری آفریقای مرکزی'; // Central African Republic
$_country_lang["42"] = 'چاد'; // Chad
$_country_lang["43"] = 'شیلی'; // Chile
$_country_lang["44"] = 'چین'; // China
$_country_lang["45"] = 'جزایر کریسمس'; // Christmas Island
$_country_lang["46"] = 'جزایر کوکوس'; // Cocos (Keeling) Islands
$_country_lang["47"] = 'کلمبیا'; // Colombia
$_country_lang["48"] = 'کامرون'; // Comoros
$_country_lang["49"] = 'جمهوری کنگو'; // Congo
$_country_lang["50"] = 'جزایر کوک'; // Cook Islands
$_country_lang["51"] = 'کاستاریکا'; // Costa Rica
$_country_lang["52"] = 'کوت د و یور'; // Cote D'Ivoire
$_country_lang["53"] = 'کرواسی'; // Croatia
$_country_lang["54"] = 'کوبا'; // Cuba
$_country_lang["55"] = 'قبرس'; // Cyprus
$_country_lang["56"] = 'جمهوری چک'; // Czech Republic
$_country_lang["57"] = 'دانمارک'; // Denmark
$_country_lang["58"] = 'جیبوتی'; // Djibouti
$_country_lang["59"] = 'دومینیک'; // Dominica
$_country_lang["60"] = 'جمهوی دومینیک'; // Dominican Republic
$_country_lang["61"] = 'تیمور شرقی'; // East Timor
$_country_lang["62"] = 'اکوادور'; // Ecuador
$_country_lang["63"] = 'مصر'; // Egypt
$_country_lang["64"] = 'السالوادور'; // El Salvador
$_country_lang["65"] = 'گینه ی نو'; // Equatorial Guinea
$_country_lang["66"] = 'اریتره'; // Eritrea
$_country_lang["67"] = 'استونی'; // Estonia
$_country_lang["68"] = 'اتیوپی'; // Ethiopia
$_country_lang["69"] = 'جزایر مالوین'; // Falkland Islands (Malvinas)
$_country_lang["70"] = 'جزایر فارو'; // Faroe Islands
$_country_lang["71"] = 'فیجی'; // Fiji
$_country_lang["72"] = 'فنلاند'; // Finland
$_country_lang["73"] = 'فرانسه'; // France
$_country_lang["74"] = 'متروپولیتن فرانسه'; // France, Metropolitan
$_country_lang["75"] = 'گویئانای فرانسه'; // French Guiana
$_country_lang["76"] = 'پولینسیای فرانسه'; // French Polynesia
$_country_lang["77"] = 'مستحفضات جنوبی فرانسه'; // French Southern Territories
$_country_lang["78"] = 'گابون'; // Gabon
$_country_lang["79"] = 'گامبیا'; // Gambia
$_country_lang["80"] = 'گرجستان'; // Georgia
$_country_lang["81"] = 'آلمان'; // Germany
$_country_lang["82"] = 'غنا'; // Ghana
$_country_lang["83"] = 'گیبرالتار'; // Gibraltar
$_country_lang["84"] = 'یونان'; // Greece
$_country_lang["85"] = 'گرینلند'; // Greenland
$_country_lang["86"] = 'گرانادا'; // Grenada
$_country_lang["87"] = 'گوادلوپه'; // Guadeloupe
$_country_lang["88"] = 'گوام'; // Guam
$_country_lang["89"] = 'گواتمالا'; // Guatemala
$_country_lang["90"] = 'گینه'; // Guinea
$_country_lang["91"] = 'گینه بیسائو'; // Guinea-bissau
$_country_lang["92"] = 'گویانا'; // Guyana
$_country_lang["93"] = 'هائیتی'; // Haiti
$_country_lang["94"] = 'جزایر هیئرد و مک دانلد'; // Heard and Mc Donald Islands
$_country_lang["95"] = 'هندوراس'; // Honduras
$_country_lang["96"] = 'هنگ کنگ'; // Hong Kong
$_country_lang["97"] = 'لهستان'; // Hungary
$_country_lang["98"] = 'آیسلند'; // Iceland
$_country_lang["99"] = 'هندوستان'; // India
$_country_lang["100"] = 'اندونزی'; // Indonesia
$_country_lang["101"] = 'جمهوری اسلامی ایران'; // Iran (Islamic Republic of)
$_country_lang["102"] = 'عراق'; // Iraq
$_country_lang["103"] = 'ایرلند'; // Ireland
$_country_lang["104"] = 'فلسطین'; // Occupied Palestine
$_country_lang["105"] = 'ایتالیا'; // Italy
$_country_lang["106"] = 'جامائیکا'; // Jamaica
$_country_lang["107"] = 'ژاپن'; // Japan
$_country_lang["108"] = 'اردن'; // Jordan
$_country_lang["109"] = 'قزاقستان'; // Kazakhstan
$_country_lang["110"] = 'کنیا'; // Kenya
$_country_lang["111"] = 'کیریباتی'; // Kiribati
$_country_lang["112"] = 'جمهوی خلق کره'; // Korea, Democratic People's Republic of
$_country_lang["113"] = 'جمهوری کره'; // Korea, Republic of
$_country_lang["114"] = 'کویت'; // Kuwait
$_country_lang["115"] = 'قرقیزستان'; // Kyrgyzstan
$_country_lang["116"] = 'جمهوری خلق لائو'; // Lao People's Democratic Republic
$_country_lang["117"] = 'لاتویا'; // Latvia
$_country_lang["118"] = 'لبنان'; // Lebanon
$_country_lang["119"] = 'لسوتو'; // Lesotho
$_country_lang["120"] = 'لیبریا'; // Liberia
$_country_lang["121"] = 'جمهوری عربی لیبی'; // Libyan Arab Jamahiriya
$_country_lang["122"] = 'لینخن اشتاین'; // Liechtenstein
$_country_lang["123"] = 'لیتوانی'; // Lithuania
$_country_lang["124"] = 'لوگزامبورگ'; // Luxembourg
$_country_lang["125"] = 'ماکائو'; // Macau
$_country_lang["126"] = 'مقدونیه - جمهوری سابق یوگسلاوی'; // Macedonia, The Former Yugoslav Republic of
$_country_lang["127"] = 'ماداگاسکار'; // Madagascar
$_country_lang["128"] = 'ملاوی'; // Malawi
$_country_lang["129"] = 'مالزی'; // Malaysia
$_country_lang["130"] = 'مالدیو'; // Maldives
$_country_lang["131"] = 'مالی'; // Mali
$_country_lang["132"] = 'مالتا'; // Malta
$_country_lang["133"] = 'جزایر مارشال'; // Marshall Islands
$_country_lang["134"] = 'مارتینیک'; // Martinique
$_country_lang["135"] = 'موریتانی'; // Mauritania
$_country_lang["136"] = 'موریس'; // Mauritius
$_country_lang["137"] = 'مایوت'; // Mayotte
$_country_lang["138"] = 'مکزیک'; // Mexico
$_country_lang["139"] = 'ایالت فدرال میکرونسیا'; // Micronesia, Federated States of
$_country_lang["140"] = 'جمهوری مولدوا'; // Moldova, Republic of
$_country_lang["141"] = 'موناکو'; // Monaco
$_country_lang["142"] = 'مغولستان'; // Mongolia
$_country_lang["241"] = 'مونته نگرو'; //Montenegro
$_country_lang["143"] = 'مونسترات'; // Montserrat
$_country_lang["144"] = 'مراکش'; // Morocco
$_country_lang["145"] = 'موزامبیک'; // Mozambique
$_country_lang["146"] = 'میانمار'; // Myanmar
$_country_lang["147"] = 'نامیبیا'; // Namibia
$_country_lang["148"] = 'نائورو'; // Nauru
$_country_lang["149"] = 'نپال'; // Nepal
$_country_lang["150"] = 'هلند'; // Netherlands
$_country_lang["151"] = 'ندرلند آنتیله'; // Netherlands Antilles
$_country_lang["152"] = 'نیو کالدونیا'; // New Caledonia
$_country_lang["153"] = 'نیو زلند'; // New Zealand
$_country_lang["154"] = 'نیکاراگوئه'; // Nicaragua
$_country_lang["155"] = 'نیجر'; // Niger
$_country_lang["156"] = 'نیجریه'; // Nigeria
$_country_lang["157"] = 'نیئو'; // Niue
$_country_lang["158"] = 'جزایر نورفولک'; // Norfolk Island
$_country_lang["159"] = 'جزایر شمالی ماریانا'; // Northern Mariana Islands
$_country_lang["160"] = 'نروژ'; // Norway
$_country_lang["161"] = 'عمان'; // Oman
$_country_lang["162"] = 'پاکستان'; // Pakistan
$_country_lang["163"] = 'پالائو'; // Palau
$_country_lang["164"] = 'پاناما'; // Panama
$_country_lang["165"] = 'پاپائو گینه نو'; // Papua New Guinea
$_country_lang["166"] = 'پاراگوئه'; // Paraguay
$_country_lang["167"] = 'پرو'; // Peru
$_country_lang["168"] = 'فیلیپین'; // Philippines
$_country_lang["169"] = 'پیتکارین'; // Pitcairn
$_country_lang["170"] = 'لهستان'; // Poland
$_country_lang["171"] = 'پرتغال'; // Portugal
$_country_lang["172"] = 'پورته ریکو'; // Puerto Rico
$_country_lang["173"] = 'قطر'; // Qatar
$_country_lang["174"] = 'ریونیون'; // Reunion
$_country_lang["175"] = 'رومانی'; // Romania
$_country_lang["176"] = 'فدراسیون روسیه'; // Russian Federation
$_country_lang["177"] = 'رواندا'; // Rwanda
$_country_lang["178"] = 'سینت کیتس و نئیوس'; // Saint Kitts and Nevis
$_country_lang["179"] = 'سینت لوسیا'; // Saint Lucia
$_country_lang["180"] = 'سینت وینسنت و گرینیداد'; // Saint Vincent and the Grenadines
$_country_lang["181"] = 'ساموئا'; // Samoa
$_country_lang["182"] = 'سن مارینو'; // San Marino
$_country_lang["183"] = 'سائو تام و پرینسیپ'; // Sao Tome and Principe
$_country_lang["184"] = 'عربستان سعودی'; // Saudi Arabia
$_country_lang["185"] = 'سنگال'; // Senegal
$_country_lang["240"] = 'صربستان'; //Serbia
$_country_lang["186"] = 'سچیلس'; // Seychelles
$_country_lang["187"] = 'سیرالئون'; // Sierra Leone
$_country_lang["188"] = 'سنگاپور'; // Singapore
$_country_lang["189"] = 'جمهوری اسلواکی'; // Slovakia (Slovak Republic)
$_country_lang["190"] = 'اسلوانی'; // Slovenia
$_country_lang["191"] = 'جزایر سولومون'; // Solomon Islands
$_country_lang["192"] = 'سومالی'; // Somalia
$_country_lang["193"] = 'آفریقای جنوبی'; // South Africa
$_country_lang["194"] = 'گرجستان و جزایر جنوبی سندویچ'; // South Georgia and the South Sandwich Islands
$_country_lang["195"] = 'اسپانیا'; // Spain
$_country_lang["196"] = 'سری لانکا'; // Sri Lanka
$_country_lang["197"] = 'ست هلنا'; // St. Helena
$_country_lang["198"] = 'ست پیر و میکولون'; // St. Pierre and Miquelon
$_country_lang["199"] = 'سودان'; // Sudan
$_country_lang["200"] = 'سورینام'; // Suriname
$_country_lang["201"] = 'جزایر والبارد و یان ماین'; // Svalbard and Jan Mayen Islands
$_country_lang["202"] = 'سوازیلند'; // Swaziland
$_country_lang["203"] = 'سوئد'; // Sweden
$_country_lang["204"] = 'سوئیتزلند'; // Switzerland
$_country_lang["205"] = 'جمهوری عربی سوریه'; // Syrian Arab Republic
$_country_lang["206"] = 'تایوان'; // Taiwan
$_country_lang["207"] = 'تاجیکستان'; // Tajikistan
$_country_lang["208"] = 'ایالات متحده تانزانیا'; // Tanzania, United Republic of
$_country_lang["209"] = 'تایلند'; // Thailand
$_country_lang["210"] = 'توگو'; // Togo
$_country_lang["211"] = 'توکلائو'; // Tokelau
$_country_lang["212"] = 'تونگا'; // Tonga
$_country_lang["213"] = 'ترینیداد و توباگو'; // Trinidad and Tobago
$_country_lang["214"] = 'تونس'; // Tunisia
$_country_lang["215"] = 'ترکیه'; // Turkey
$_country_lang["216"] = 'ترکمنستان'; // Turkmenistan
$_country_lang["217"] = 'جزایر ترک و سیکاکوس'; // Turks and Caicos Islands
$_country_lang["218"] = 'توالا'; // Tuvalu
$_country_lang["219"] = 'اوگاندا'; // Uganda
$_country_lang["220"] = 'اوکراین'; // Ukraine
$_country_lang["221"] = 'امارات متحده عربی'; // United Arab Emirates
$_country_lang["222"] = 'انگلستان'; // United Kingdom
$_country_lang["223"] = 'ایالات متحده امریکا'; // United States
$_country_lang["224"] = 'جزایر اقلیت اوتلین'; // United States Minor Outlying Islands
$_country_lang["225"] = 'اوروگوئه'; // Uruguay
$_country_lang["226"] = 'ازبکستان'; // Uzbekistan
$_country_lang["227"] = 'وانواتو'; // Vanuatu
$_country_lang["228"] = 'دریای مقدس شهر ایالت واتیکان'; // Vatican City State (Holy See)
$_country_lang["229"] = 'ونزوئلا'; // Venezuela
$_country_lang["230"] = 'ویت نام'; // Viet Nam
$_country_lang["231"] = 'جزایر ویرجین (انگلیس)'; // Virgin Islands (British)
$_country_lang["232"] = 'جزایر ویرجین (امریکا)'; // Virgin Islands (U.S.)
$_country_lang["233"] = 'جزایر والیس و فوتانا'; // Wallis and Futuna Islands
$_country_lang["234"] = 'صحرای غربی'; // Western Sahara
$_country_lang["235"] = 'یمن'; // Yemen
$_country_lang["236"] = 'صربستان و مونته نگرو'; // DEPRECATED: kept for backwards compatibility, Serbia and Montenegro
$_country_lang["237"] = 'جمهوری دموکراتیک کونگو'; // Congo, Democratic Republic of the
$_country_lang["238"] = 'زامبیا'; // Zambia
$_country_lang["239"] = 'زیمبابوه'; // Zimbabwe
?><file_sep>/assets/snippets/weblogin/webchangepwd.inc.php
<?php
# WebChangePwd 1.0
# Created By <NAME>, 2005
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
defined('IN_PARSER_MODE') or die();
# WebChangePwd snippet body: renders the change-password form for a validated
# web user and, on postback, verifies the old password, obtains a new one
# (generated or user-specified), notifies the user and saves it.
# Expects from the caller: $modx, $tpl (doc id / chunk name / empty for the
# built-in template), $isPostBack; appends its markup to $output.
# load tpl
if(is_numeric($tpl)) $tpl = ($doc=$modx->getDocuments($tpl)) ? $doc['content']:"Document '$tpl' not found.";
else if($tpl) $tpl = ($chunk=$modx->getChunk($tpl)) ? $chunk:"Chunk '$tpl' not found.";
if(!$tpl) $tpl = getWebChangePwdtpl();
// extract declarations
$declare = webLoginExtractDeclarations($tpl);
// Split the template into [0] form section and [1] notification section.
$tpls = explode((isset($declare["separator"]) ? $declare["separator"]:"<!--tpl_separator-->"),$tpl);
if(!$isPostBack && isset($_SESSION['webValidated'])){
// display password screen
$tpl = $tpls[0];
$tpl = str_replace("[+action+]",$modx->makeUrl($modx->documentIdentifier),$tpl);
$tpl.="<script type='text/javascript'>
if (document.changepwdfrm) document.changepwdfrm.oldpassword.focus();
</script>";
$output .= $tpl;
}
else if ($isPostBack && isset($_SESSION['webValidated'])){
// Form was submitted by a validated web user: collect the inputs.
// NOTE(review): $genpassword is assigned here but never used below.
$oldpassword = $_POST['oldpassword'];
$genpassword = $_POST['newpassword'];
$passwordgenmethod = $_POST['passwordgenmethod'];
$passwordnotifymethod = $_POST['passwordnotifymethod'];
$specifiedpassword = $_POST['specifiedpassword'];
$uid = $modx->getLoginUserID();
$type = $modx->getLoginUserType();
// load template
$tpl = $tpls[0];
$tpl = str_replace("[+action+]",$modx->makeUrl($modx->documentIdentifier),$tpl);
$tpl.="<script type='text/javascript'>if (document.changepwdfrm) document.changepwdfrm.oldpassword.focus();</script>";
// get user record
if($type=='manager') $ds = $modx->getUserInfo($uid);
else $ds = $modx->getWebUserInfo($uid);
// verify password
// NOTE(review): passwords are compared as unsalted md5 hashes (legacy MODx
// scheme) - weak by modern standards, kept for compatibility.
if($ds['password']==md5($oldpassword)) {
// verify password
// "spec" method requires the password typed twice to match.
if ($passwordgenmethod=="spec" && $_POST['specifiedpassword']!=$_POST['confirmpassword']) {
$output = webLoginAlert("Password typed is mismatched",1).$tpl;
return;
}
// generate a new password for this user
if($specifiedpassword!="" && $passwordgenmethod=="spec") {
// User-specified password: enforce the 6-character minimum.
if(strlen($specifiedpassword) < 6 ) {
$output = webLoginAlert("Password is too short!").$tpl;
return;
} else {
$newpassword = $specifiedpassword;
}
} elseif($specifiedpassword=="" && $passwordgenmethod=="spec") {
$output = webLoginAlert("You didn't specify a password for this user!").$tpl;
return;
} elseif($passwordgenmethod=='g') {
// 'g': auto-generate an 8-character password.
$newpassword = webLoginGeneratePassword(8);
} else {
$output = webLoginAlert("No password generation method specified!").$tpl;
return;
}
// handle notification
if($passwordnotifymethod=='e') {
// 'e': e-mail the new password; any non-true return is an error string
// which aborts before the password is saved.
$rt = webLoginSendNewPassword($ds["email"],$ds["username"],$newpassword,$ds["fullname"]);
if($rt!==true) { // an error occured
$output = $rt.$tpl;
return;
}
else {
$newpassmsg = "A copy of the new password was sent to your email address.";
}
}
else {
// Any other method: show the new password on screen.
$newpassmsg = "The new password is <b>$newpassword</b>.";
}
// save new password to database
$rt = $modx->changeWebUserPassword($oldpassword,$newpassword);
if($rt!==true) {
$output = webLoginAlert("An error occured while saving new password: $rt");
return;
}
// display change notification
$tpl = $tpls[1];
$tpl = str_replace("[+newpassmsg+]",$newpassmsg,$tpl);
$output .= $tpl;
}
else {
$output = webLoginAlert("Incorrect password. Please try again.").$tpl;
return;
}
}
/**
 * Returns the default (built-in) WebChangePwd template.
 *
 * The markup is captured via output buffering: everything between `?>` and
 * the closing `<?php` below is raw HTML emitted verbatim, so no comments can
 * be added inside it. The `<hr>` is the declared section separator splitting
 * the form section from the notification section.
 *
 * @return string the two-section template markup
 */
function getWebChangePwdtpl(){
ob_start(); // capture the HTML below instead of sending it to the browser
?>
<!-- #declare:separator <hr> -->
<!-- login form section-->
<form method="post" name="changepwdfrm" action="[+action+]" style="margin: 0px; padding: 0px;">
<table border="0" cellpadding="1" width="300">
<tr>
<td><fieldset style="width:300px">
<legend><b>Enter your current password</b></legend>
<table border="0" cellpadding="0" style="margin-left:20px;">
<tr>
<td style="padding:0px 0px 0px 0px;">
<label for="oldpassword" style="width:120px">Current password:</label>
</td>
<td style="padding:0px 0px 0px 0px;">
<input type="password" name="oldpassword" size="20" /><br />
</td>
</tr>
</table>
</fieldset> <fieldset style="width:300px">
<legend><b>New password method</b></legend>
<input type="radio" name="passwordgenmethod" value="g" checked />Let this website
generate a password.<br />
<input type="radio" name="passwordgenmethod" value="spec" />Let me specify
the password:<br />
<div style="padding-left:20px">
<table border="0" cellpadding="0">
<tr>
<td style="padding:0px 0px 0px 0px;">
<label for="specifiedpassword" style="width:120px">New password:</label>
</td>
<td style="padding:0px 0px 0px 0px;">
<input type="password" name="specifiedpassword" onchange="documentdirty=true;" onkeypress="document.changepwdfrm.passwordgenmethod[1].checked=true;" size="20" /><br />
</td>
</tr>
<tr>
<td style="padding:0px 0px 0px 0px;">
<label for="confirmpassword" style="width:120px">Confirm password:</label>
</td>
<td style="padding:0px 0px 0px 0px;">
<input type="password" name="confirmpassword" onchange="documentdirty=true;" onkeypress="document.changepwdfrm.passwordgenmethod[1].checked=true;" size="20" /><br />
</td>
</tr>
</table>
<small><span class="warning" style="font-weight:normal">The password you
specify needs to be at least 6 characters long.</span></small>
</div>
</fieldset><br />
<fieldset style="width:300px">
<legend><b>Password notification method</b></legend>
<input type="radio" name="passwordnotifymethod" value="e" />Send the new password
by e-mail.<br />
<input type="radio" name="passwordnotifymethod" value="s" checked />Show the new password
on screen.
</fieldset></td>
</tr>
<tr>
<td align="right"><input type="submit" value="Submit" name="cmdwebchngpwd" />
<input type="reset" value="Reset" name="cmdreset" />
</td>
</tr>
</table>
</form>
<hr>
<!-- notification section -->
Your password was successfully changed.<br /><br />
[+newpassmsg+]
<?php
// Collect the buffered markup and restore normal output.
$t = ob_get_contents();
ob_end_clean();
return $t;
}
?><file_sep>/manager/media/style/MODxCarbon/style.php
<?php
/**
* Filename: media/style/MODxCarbon/style.php
* Function: Manager style variables for images and icons.
* Encoding: ISO-Latin-1
* Author: <NAME> updates by <NAME>
* icons by <NAME> of FamFamFam http://www.famfamfam.com/lab/icons/
* Date: 2006/05/30
* Version: 1.0
* MODx version: 1.0.0
*/
$style_path = "media/style/MODxCarbon/";
// Tree Menu Toolbar
$_style["add_doc_tree"] = "<img src=\"".$style_path."images/icons/folder_page_add.png\" style=\"width:16px;height:16px\" />";
$_style["add_weblink_tree"] = "<img src=\"".$style_path."images/icons/link_add.png\" style=\"width:16px;height:16px\" />";
$_style["collapse_tree"] = "<img src=\"".$style_path."images/icons/arrow_up.png\" style=\"width:16px;height:16px\" />";
$_style["empty_recycle_bin"] = "<img src=\"".$style_path."images/icons/trash_full.png\" style=\"width:16px;height:16px\" />";
$_style["empty_recycle_bin_empty"] = "<img src=\"".$style_path."images/icons/trash.png\" style=\"width:16px;height:16px\" />";
$_style["expand_tree"] = "<img src=\"".$style_path."images/icons/arrow_down.png\" style=\"width:16px;height:16px\" />";
$_style["hide_tree"] = "<img src=\"".$style_path."images/icons/application_side_contract.png\" style=\"width:16px;height:16px\" />";
$_style["refresh_tree"] = "<img src=\"".$style_path."images/icons/refresh.png\" style=\"width:16px;height:16px\" />";
$_style["show_tree"] = $style_path."images/icons/application_side_expand.png";
$_style["sort_tree"] = "<img src=\"".$style_path."images/icons/sort.png\" style=\"width:16px;height:16px\" />";
// Tree Icons
$_style["tree_deletedpage"] = $style_path."images/tree/deletedpage.gif";
$_style["tree_folder"] = $style_path."images/tree/application_double.png"; /* folder.gif */
$_style["tree_folderopen"] = $style_path."images/tree/application_double.png"; /* folder-open.gif */
$_style["tree_folder_secure"] = $style_path."images/tree/application_double_key.png";
$_style["tree_folderopen_secure"] = $style_path."images/tree/application_double_key.png";
$_style["tree_globe"] = $style_path."images/tree/globe.gif";
$_style["tree_linkgo"] = $style_path."images/tree/link_go.png";
$_style["tree_minusnode"] = $style_path."images/tree/minusnode.gif";
$_style["tree_page"] = $style_path."images/tree/application.png";
$_style["tree_page_secure"] = $style_path."images/tree/application_key.png";
$_style["tree_page_blank"] = $style_path."images/tree/application.png";
$_style["tree_page_css"] = $style_path."images/tree/application_css.png";
$_style["tree_page_html"] = $style_path."images/tree/application_html.png";
$_style["tree_page_xml"] = $style_path."images/tree/application_xml.png";
$_style["tree_page_js"] = $style_path."images/tree/application_js.png";
$_style["tree_page_rss"] = $style_path."images/tree/application_rss.png";
$_style["tree_page_pdf"] = $style_path."images/tree/application_pdf.png";
$_style["tree_page_word"] = $style_path."images/tree/application_word.png";
$_style["tree_page_excel"] = $style_path."images/tree/application_excel.png";
$_style["tree_plusnode"] = $style_path."images/tree/plusnode.gif";
$_style["tree_showtree"] = '<img src="'.$style_path.'images/tree/sitemap.png" width="16" height="16" align=\"absmiddle\" />';
$_style["tree_weblink"] = $style_path."images/tree/link_go.png";
// Icons
$_style["icons_add"] = $style_path."images/icons/save.png";
$_style["icons_cal"] = $style_path."images/icons/cal.gif";
$_style["icons_cal_nodate"] = $style_path."images/icons/cal_nodate.gif";
$_style["icons_cancel"] = $style_path."images/icons/stop.png";
$_style["icons_close"] = $style_path."images/icons/stop.png";
$_style["icons_delete"] = $style_path."images/icons/delete.png";
$_style["icons_delete_document"] = $style_path."images/icons/delete.png";
$_style["icons_document_overview"] = $style_path."images/icons/page_white_magnify.png";
$_style["icons_duplicate_document"] = $style_path."images/icons/page_white_copy.png";
$_style["icons_edit_document"] = $style_path."images/icons/save.png";
$_style["icons_email"] = $style_path."images/icons/email.gif";
$_style["icons_folder"] = $style_path."images/icons/folder.gif";
$_style["icons_home"] = $style_path."images/icons/home.gif";
$_style["icons_information"] = $style_path."images/icons/information.png";
$_style["icons_loading_doc_tree"] = $style_path."images/icons/information.png"; // top bar
$_style["icons_mail"] = $style_path."images/icons/email.png"; // top bar
$_style["icons_message_forward"] = $style_path."images/icons/forward.gif";
$_style["icons_message_reply"] = $style_path."images/icons/reply.gif";
$_style["icons_modules"] = $style_path."images/icons/modules.gif";
$_style["icons_move_document"] = $style_path."images/icons/page_white_go.png";
$_style["icons_new_document"] = $style_path."images/icons/page_white_add.png";
$_style["icons_new_weblink"] = $style_path."images/icons/world_link.png";
$_style["icons_preview_document"] = $style_path."images/icons/page_white_magnify.png";
$_style["icons_publish_document"] = $style_path."images/icons/clock_play.png";
$_style["icons_refresh"] = $style_path."images/icons/refresh.png";
$_style["icons_save"] = $style_path."images/icons/save.png";
$_style["icons_set_parent"] = $style_path."images/icons/layout_go.png";
$_style["icons_table"] = $style_path."images/icons/table.gif";
$_style["icons_undelete_document"] = $style_path."images/icons/b092.gif";
$_style["icons_unpublish_document"] = $style_path."images/icons/clock_stop.png";
$_style["icons_user"] = $style_path."images/icons/user.gif";
$_style["icons_weblink"] = $style_path."images/icons/world_link.png";
$_style["icons_working"] = $style_path."images/icons/exclamation.png"; // top bar
// Tabs
$_style["icons_tab_preview"] = $style_path."images/icons/preview.gif";
// Indicators
$_style["icons_tooltip"] = $style_path."images/icons/b02.gif";
$_style["icons_tooltip_over"] = $style_path."images/icons/b02_trans.gif";
// Large Icons
$_style["icons_backup_large"] = $style_path."images/icons/backup.gif";
$_style["icons_mail_large"] = $style_path."images/icons/mail_generic.gif";
$_style["icons_modules_large"] = $style_path."images/icons/modules.gif";
$_style["icons_resources_large"] = $style_path."images/icons/resources.gif";
$_style["icons_security_large"] = $style_path."images/icons/security.gif";
$_style["icons_webusers_large"] = $style_path."images/icons/web_users.gif";
// Miscellaneous
$_style["ajax_loader"] = "<p>".$_lang['loading_page']."</p><p><img src=\"".$style_path."images/misc/ajax-loader.gif\" alt=\"Please wait\" /></p>";
$_style["modx_logo"] = $style_path."images/misc/logo.png";
$_style["spacer"] = $style_path."images/misc/spacer.gif";
$_style["tx"] = $style_path."images/misc/_tx_.gif";
$_style["icons_right_arrow"] = $style_path."images/icons/circlerightarrow.gif";
// Credits
$_style["credits_dTree"] = $style_path."images/credits/dtree.gif";
$_style["credits_everaldo"] = $style_path."images/credits/penguin.gif";
$_style["credits_mysql"] = $style_path."images/credits/mysql.gif";
$_style["credits_php"] = $style_path."images/credits/php.gif";
$_style["credits_webfx"] = $style_path."images/credits/webfx.gif";
?><file_sep>/install/connection.databasetest.php
<?php
/**
 * AJAX endpoint for the installer's "test database" button.
 * Connects with the POSTed credentials, then either selects the target
 * database, creates it with the requested collation, or reports why it
 * cannot be used. Echoes a localized HTML status string for connection.js.
 */
$host = $_POST['host'];
$uid = $_POST['uid'];
$pwd = $_POST['pwd'];
$installMode = $_POST['installMode'];
require_once("lang.php"); // loads $_lang for the selected installer language
$output = $_lang["status_checking_database"];
// NOTE(review): uses the legacy mysql_* extension (removed in PHP 7) -
// consistent with the rest of this installer.
if (!$conn = @ mysql_connect($host, $uid, $pwd)) {
$output .= '<span style="color:#FF0000;">'.$_lang['status_failed'].'</span>';
}
else {
$database_name = $_POST['database_name'];
// Strip backticks so the name can be safely backtick-quoted below.
$database_name = str_replace("`", "", $database_name);
$tableprefix = $_POST['tableprefix'];
$database_collation = $_POST['database_collation'];
$database_connection_method = $_POST['database_connection_method'];
if (!@ mysql_select_db($database_name, $conn)) {
// create database
// Derive the charset from the collation name (text before the first '_').
$database_charset = substr($database_collation, 0, strpos($database_collation, '_'));
// NOTE(review): $database_collation and $tableprefix come straight from
// POST and are interpolated into SQL - installer-only context; confirm the
// install directory is removed after installation.
$query = "CREATE DATABASE `".$database_name."` CHARACTER SET ".$database_charset." COLLATE ".$database_collation.";";
if (!@ mysql_query($query)){
$output .= '<span style="color:#FF0000;">'.$_lang['status_failed_could_not_create_database'].'</span>';
}
else {
$output .= '<span style="color:#80c000;">'.$_lang['status_passed_database_created'].'</span>';
}
}
elseif (($installMode == 0) && (@ mysql_query("SELECT COUNT(*) FROM {$database_name}.`{$tableprefix}site_content`"))) {
// New install, but a site_content table with this prefix already exists.
$output .= '<span style="color:#FF0000;">'.$_lang['status_failed_table_prefix_already_in_use'].'</span>';
}
elseif (($database_connection_method != 'SET NAMES') && ($rs = @ mysql_query("show variables like 'collation_database'")) && ($row = @ mysql_fetch_row($rs)) && ($row[1] != $database_collation)) {
// Without SET NAMES, the existing database collation must match the request.
$output .= '<span style="color:#FF0000;">'.sprintf($_lang['status_failed_database_collation_does_not_match'], $row[1]).'</span>';
}
else {
$output .= '<span style="color:#80c000;">'.$_lang['status_passed'].'</span>';
}
}
echo $output;
?><file_sep>/manager/actions/search.static.php
<?php
if(IN_MANAGER_MODE!="true") die("<b>INCLUDE_ORDERING_ERROR</b><br /><br />Please use the MODx Content Manager instead of accessing this file directly.");
unset($_SESSION['itemname']); // clear this, because it's only set for logging purposes
?>
<h1><?php echo $_lang['search_criteria']; ?></h1>
<div class="sectionBody">
<form action="index.php?a=71" method="post" name="searchform">
<table width="100%" border="0">
<tr>
<td width="120"><?php echo $_lang['search_criteria_id']; ?></td>
<td width="20"> </td>
<td width="120"><input name="searchid" type="text"></td>
<td><?php echo $_lang['search_criteria_id_msg']; ?></td>
</tr>
<tr>
<td><?php echo $_lang['search_criteria_title']; ?></td>
<td> </td>
<td><input name="pagetitle" type="text"></td>
<td><?php echo $_lang['search_criteria_title_msg']; ?></td>
</tr>
<tr>
<td><?php echo $_lang['search_criteria_longtitle']; ?></td>
<td> </td>
<td><input name="longtitle" type="text"></td>
<td><?php echo $_lang['search_criteria_longtitle_msg']; ?></td>
</tr>
<tr>
<td><?php echo $_lang['search_criteria_content']; ?></td>
<td> </td>
<td><input name="content" type="text"></td>
<td><?php echo $_lang['search_criteria_content_msg']; ?></td>
</tr>
<tr>
<td colspan="4">
<ul class="actionButtons">
<li><a href="#" onclick="document.searchform.submitok.click();"><img src="<?php echo $_style["icons_save"] ?>" /> <?php echo $_lang['search'] ?></a></li>
<li><a href="index.php?a=2"><img src="<?php echo $_style["icons_cancel"] ?>" /> <?php echo $_lang['cancel'] ?></a></li>
</ul>
</td>
</tr>
</table>
<input type="submit" value="Search" name="submitok" style="display:none">
</form>
</div>
<?php
if(isset($_REQUEST['submitok'])) {
	// Collect the submitted criteria. The id is forced to an integer;
	// content/longtitle go through db->escape() before entering the SQL.
	$searchid = !empty($_REQUEST['searchid']) ? intval($_REQUEST['searchid']) : 0;
	// NOTE(review): pagetitle is sanitized with htmlentities() (ENT_QUOTES also
	// encodes quotes, which keeps the query intact) instead of db->escape()
	// like the other fields - kept as-is to preserve search-match behavior.
	$searchtitle = htmlentities($_POST['pagetitle'], ENT_QUOTES, $modx_manager_charset);
	$searchcontent = $modx->db->escape($_REQUEST['content']);
	$searchlongtitle = $modx->db->escape($_REQUEST['longtitle']);
	// Build the WHERE clause. Initialize first (the original appended to an
	// undefined variable) and compare the id against 0 explicitly so PHP 8's
	// changed 0=="" semantics cannot add a spurious id='0' condition.
	$sqladd = '';
	$sqladd .= $searchid!==0 ? " AND $dbase.`".$table_prefix."site_content`.id='$searchid' " : "" ;
	$sqladd .= $searchtitle!="" ? " AND $dbase.`".$table_prefix."site_content`.pagetitle LIKE '%$searchtitle%' " : "" ;
	$sqladd .= $searchlongtitle!="" ? " AND $dbase.`".$table_prefix."site_content`.longtitle LIKE '%$searchlongtitle%' " : "" ;
	$sqladd .= $searchcontent!="" ? " AND $dbase.`".$table_prefix."site_content`.content LIKE '%$searchcontent%' " : "" ;
	// Fetch matching documents ("1=1" lets every optional condition start with AND).
	$sql = "SELECT id, pagetitle, description, deleted, published, isfolder, type FROM $dbase.`".$table_prefix."site_content` where 1=1 ".$sqladd." ORDER BY id;";
	$rs = $modx->db->query($sql);
	$limit = $modx->db->getRecordCount($rs);
?>
<div class="sectionHeader"><?php echo $_lang['search_results']; ?></div><div class="sectionBody">
<?php
if($limit<1) {
echo $_lang['search_empty'];
} else {
printf($_lang['search_results_returned_msg'], $limit);
?>
<script type="text/javascript" src="media/script/tablesort.js"></script>
<table border=0 cellpadding=2 cellspacing=0 class="sortabletable sortable-onload-2 rowstyle-even" id="table-1" width="90%">
<thead>
<tr bgcolor='#CCCCCC'>
<th width="20"></th>
<th class="sortable"><b><?php echo $_lang['search_results_returned_id']; ?></b></th>
<th class="sortable"><b><?php echo $_lang['search_results_returned_title']; ?></b></th>
<th class="sortable"><b><?php echo $_lang['search_results_returned_desc']; ?></b></th>
<th width="20"></th>
<th width="20"></th>
</tr>
</thead>
<tbody>
<?php
for ($i = 0; $i < $limit; $i++) {
$logentry = $modx->db->getRow($rs);
// figure out the icon for the document...
$icon = "";
if($logentry['published']==0) {
$icon .= "unpublished";
}
if($logentry['type']=='reference') {
$icon .= "weblink";
}
if($logentry['isfolder']==1) {
$icon .= "folder";
}
if($icon=="" || $icon=="unpublished") {
$icon .= "page";
}
?>
<tr>
<td align="center"><a href="index.php?a=3&id=<?php echo $logentry['id']; ?>"onMouseover="status='<?php echo $_lang['search_view_docdata']; ?>';return true;" onmouseout="status='';return true;" title="<?php echo $_lang['search_view_docdata']; ?>"><img src="media/style/<?php echo $manager_theme ? "$manager_theme/":""; ?>images/icons/context_view.gif" border=0></a></td>
<td><?php echo $logentry['id']; ?></td>
<?php if (function_exists('mb_strlen') && function_exists('mb_substr')) {?>
<td><?php echo mb_strlen($logentry['pagetitle'], $modx_manager_charset)>20 ? mb_substr($logentry['pagetitle'], 0, 20, $modx_manager_charset)."..." : $logentry['pagetitle'] ; ?></td>
<td><?php echo mb_strlen($logentry['description'], $modx_manager_charset)>35 ? mb_substr($logentry['description'], 0, 35, $modx_manager_charset)."..." : $logentry['description'] ; ?></td>
<?php } else { ?>
<td><?php echo strlen($logentry['pagetitle'])>20 ? substr($logentry['pagetitle'], 0, 20)."..." : $logentry['pagetitle'] ; ?></td>
<td><?php echo strlen($logentry['description'])>35 ? substr($logentry['description'], 0, 35)."..." : $logentry['description'] ; ?></td>
<?php } ?>
<td align="center"><?php echo $logentry['deleted']==1 ? "<img align='absmiddle' src='media/style/".($manager_theme ? $manager_theme.'/' : '')."images/icons/trash_full.png' alt='".$_lang['search_item_deleted']."'>" : ""; ?></td>
<td align="center"><img align='absmiddle' src='media/style/<?php echo $manager_theme ? "$manager_theme/":""; ?>images/tree/<?php echo $icon; ?>.gif'></td>
</tr>
<?php
}
?>
</tbody>
</table>
<?php
}
?>
</div>
<?php
}
?>
<file_sep>/assets/plugins/tinymce3241/tinymce.linklist.flat.php
<?php
include_once "../../../manager/includes/config.inc.php";
// Emit the TinyMCE external link list: a JS array of
// ["Title (id)", "[~id~]"] pairs for every published document.
$allpages = getAllPages();
$list = ''; // BUGFIX: was appended to below without initialization (E_NOTICE)
foreach($allpages as $page){
	// Prefer the page title, falling back to the menu title; quote for JS/HTML.
	$caption = ($page['pagetitle'])?htmlspecialchars($page['pagetitle'],ENT_QUOTES):htmlspecialchars($page['menutitle'],ENT_QUOTES);
	$list .=($list!='')?",\n":"\n";
	$list.= "[\"".$caption." (".$page['id'].")"."\", \"[\"+\"~".$page['id']."~\"+\"]\"]";
}
$output = "var tinyMCELinkList = new Array(\n". $list .");";
echo $output;
/**
 * getAllPages
 * Fetches all published, non-deleted site_content documents (left-joined
 * against document_groups) and returns them as an array of associative rows.
 *
 * @param int    $id     unused by this implementation — TODO confirm before removing
 * @param string $sort   column list to ORDER BY (each name gets an "sc." prefix)
 * @param string $dir    sort direction, 'ASC' or 'DESC'
 * @param string $fields column list to SELECT (each name gets an "sc." prefix)
 * @return array         rows as returned by mysql_fetch_assoc()
 *
 * NOTE(review): this uses the legacy mysql_* extension (removed in PHP 7)
 * and opens its own connection instead of reusing the MODx DB layer —
 * confirm the target PHP version before relying on this file.
 */
function getAllPages($id=0, $sort='menuindex', $dir='ASC', $fields='pagetitle, id, menutitle') {
global $database_type;
global $database_server;
global $database_user;
global $database_password;
global $dbase;
global $table_prefix;
$tblsc = $dbase.".`".$table_prefix."site_content`";
$tbldg = $dbase.".`".$table_prefix."document_groups`";
// modify field names to use sc. table reference
// (preg_replace strips one leading whitespace char from each comma-split name)
$fields = 'sc.'.implode(',sc.',preg_replace("/^\s/i","",explode(',',$fields)));
$sort = 'sc.'.implode(',sc.',preg_replace("/^\s/i","",explode(',',$sort)));
// Connecting, selecting database
$link = mysql_connect($database_server, $database_user, $database_password) or die('Could not connect: ' . mysql_error());
mysql_select_db(str_replace('`', '', $dbase)) or die('Could not select database');
// set the connection charset (errors deliberately suppressed)
@mysql_query("{$GLOBALS['database_connection_method']} {$GLOBALS['database_connection_charset']}");
$sql = "SELECT DISTINCT $fields FROM $tblsc sc
LEFT JOIN $tbldg dg on dg.document = sc.id
WHERE sc.published=1 AND sc.deleted=0
ORDER BY $sort $dir;";
$result = mysql_query($sql) or die('Query failed: ' . mysql_error());
$resourceArray = array();
for($i=0;$i<@mysql_num_rows($result);$i++) {
array_push($resourceArray,mysql_fetch_assoc($result));
}
// Free resultset
mysql_free_result($result);
// Closing connection
mysql_close($link);
// NOTE(review): sort() re-orders the rows by PHP array comparison,
// overriding the SQL ORDER BY above — confirm this is intended.
sort($resourceArray);
return $resourceArray;
}
?>
<file_sep>/manager/media/browser/mcpuk/browser.php
<!--
 * FCKeditor - The text editor for internet
 * Copyright (C) 2003-2005 <NAME>
 *
 * Licensed under the terms of the GNU Lesser General Public License:
 * http://www.opensource.org/licenses/lgpl-license.php
 *
 * For further information visit:
 * http://www.fckeditor.net/
 *
 * File Name: browser.html
 * This page compose the File Browser dialog frameset.
 *
 * File Authors:
 * <NAME> (<EMAIL>)
-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
	<title>FCKeditor - Resources Browser</title>
	<link href="browser.css" type="text/css" rel="stylesheet">
<?php
// Guard the $_GET reads with isset()/empty() so the page does not raise
// E_NOTICE when opened without editor parameters; behavior is unchanged.
if(isset($_GET['editor']) && ($_GET['editor'] == 'tinymce3' || $_GET['editor'] == 'tinymce') && !empty($_GET['editorpath'])){
	$editorPath = htmlspecialchars($_GET['editorpath'], ENT_QUOTES);
?>
	<script language="javascript" type="text/javascript" src="<?php echo $editorPath; ?>/jscripts/tiny_mce/tiny_mce_popup.js"></script>
	<script language="javascript" type="text/javascript" src="<?php echo $editorPath; ?>/tinymce.modxfb.js"></script>
<?php
}else{
?>
	<script language="javascript">
		function SetUrl(fileUrl){
			window.top.opener.SetUrl(fileUrl);
			window.top.close();
			window.top.opener.focus();
		}
	</script>
<?php
}
?>
</head>
<frameset cols="150,*" framespacing="0" bordercolor="#f1f1e3" frameborder="no" class="Frame_none">
	<frameset rows="50,*" framespacing="0" class="Frame_r">
		<frame src="frmresourcetype.html" scrolling="no" frameborder="no">
		<frame name="frmFolders" id="frmFolders" src="frmfolders.html" scrolling="auto" frameborder="no">
	</frameset>
	<frameset rows="50,*,50" framespacing="0" class="Frame_none">
		<frame name="frmActualFolder" src="frmactualfolder.html" scrolling="no" frameborder="no">
		<frame name="frmResourcesList" id="mainWindow" src="frmresourceslist.html" scrolling="auto" frameborder="no">
		<frameset cols="150,*,0" framespacing="0" frameborder="no" class="Frame_t">
			<frame name="frmCreateFolder" id="frmCreateFolder" src="frmcreatefolder.html" scrolling="no" frameborder="no">
			<frame name="frmUpload" id="frmUpload" src="frmupload.html" scrolling="no" frameborder="no">
			<frame name="frmUploadWorker" src="" scrolling="no" frameborder="no">
		</frameset>
	</frameset>
</frameset>
</html>
<?php
global $tinyLang;
// Lookup table: MODx manager language name -> TinyMCE language code.
$tinyLang = array(
	array("czech", "cs"),
	array("danish", "da"),
	array("english", "en"),
	array("english-british", "en"),
	array("finnish", "fi"),
	array("francais", "fr"),
	array("francais-utf8", "fr"),
	array("german", "de"),
	array("italian", "it"),
	array("japanese-utf8", "ja"),
	array("nederlands", "nl"),
	array("norsk", "nn"),
	array("persian", "fa"),
	array("polish", "pl"),
	array("portuguese", "pt"),
	array("russian", "ru"),
	array("russian-UTF8", "ru"),
	array("simple_chinese-gb2312", "zh"),
	array("spanish", "es"),
	array("svenska", "sv"),
	array("svenska-utf8", "sv")
);
global $tinyLangCount;
$tinyLangCount = count($tinyLang);
if (!function_exists('getTinyMCELang')) {
	/**
	 * Map a MODx manager language name to its TinyMCE language code.
	 * Scans the first $tinyLangCount entries of the global $tinyLang table;
	 * a later duplicate entry wins (exactly as in the index-based original).
	 * Unknown names fall back to 'en'.
	 */
	function getTinyMCELang($lang){
		global $tinyLang;
		global $tinyLangCount;
		$langSel = 'en';
		$index = 0;
		while ($index < $tinyLangCount) {
			list($name, $code) = $tinyLang[$index];
			if ($name == $lang) {
				$langSel = $code;
			}
			$index++;
		}
		return $langSel;
	}
}
?><file_sep>/assets/snippets/eform/lang/danish.inc.php
<?php
/**
 * snippets/eform/lang/danish.inc.php
 * Danish language file for eForm
 * (Danish translations of the keys defined in english.inc.php)
 */
$_lang["ef_thankyou_message"] = "<h3>Tak for det!</h3><p>Dine informationer blev succesfuldt sendt.</p>";
$_lang["ef_no_doc"] = "Dokument eller chunk blev ikke fundet for skabelon id=";
//$_lang["ef_no_chunk"] = ""; //deprecated
$_lang["ef_validation_message"] = "<strong>Nogle fejl blev opdaget i din form:</strong><br />";
$_lang["ef_required_message"] = " Følgende påkrævede felter mangler: {fields}<br />";
$_lang["ef_invalid_number"] = " er ikke et lovligt nummer";
$_lang["ef_invalid_date"] = " er ikke en gyldig dato";
$_lang["ef_invalid_email"] = " er ikke en gyldig email adresse";
$_lang["ef_upload_exceeded"] = " har overskredet maksimum upload grænse.";
$_lang["ef_failed_default"] = "Ikke korrekt værdi";
$_lang["ef_failed_vericode"] = "Ugyldig verification kode.";
$_lang["ef_failed_range"] = "Værdi ikke i tilladte række";
$_lang["ef_failed_list"] = "Værdi ikke i liste af tilladte værdier";
$_lang["ef_failed_eval"] = "Værdi kunne ikke valideres";
$_lang["ef_failed_ereg"] = "Value kunne ikke valideres";
$_lang["ef_failed_upload"] = "Ukorrekt fil type.";
$_lang["ef_error_validation_rule"] = "Valideringsregel ikke genkendt";
$_lang["ef_tamper_attempt"] = "manipulationsforsøg opdaget!";
$_lang["ef_error_formid"] = "Ugyldig Form Id nummer eller navn.";
$_lang["ef_debug_info"] = "Debug info: ";
$_lang["ef_is_own_id"] = "<span class=\"ef-form-error\">Form skabelon sat til id af side indeholdende snippet kald! Du kan ikke have en form i det samme dokument som et snippet kald.</span> id=";
$_lang["ef_sql_no_result"] = " passerede helt stille validering. <span style=\"color:red;\"> SQL returnerede inge resultater!</span> ";
$_lang['ef_regex_error'] = 'Fejl i regulær expression ';
$_lang['ef_debug_warning'] = '<p style="color:red;"><span style="font-size:1.5em;font-weight:bold;">WARNING - DEBUGGING er ON</span> <br />Vær sikker på at du slår debugging off før du sender denne form live!</p>';
$_lang['ef_mail_abuse_subject'] = 'Potentiel email form misbrug opdaget for form id';
$_lang['ef_mail_abuse_message'] = '<p>En form på dit website kan være emne for et email injection forsøg. Detaljerne af de angivede værdier er printet nedenunder. Mistænkte tekst er blevet indeholdt i \[..]\ tags. </p>';
$_lang['ef_mail_abuse_error'] = '<strong>Ugyldig eller usikre indtastninger blev opdaget i din form</strong>.';
$_lang['ef_eval_deprecated'] = "#EVAL reglen er forældet og vil muligvis ikke fungerer i fremtidige versioner. Brug #FUNCTION istedet for.";
?>
<file_sep>/manager/includes/session_keepalive.php
<?php
/**
 * session_keepalive.php
 *
 * This page is requested once in awhile to keep the session alive and kicking.
 * It refreshes the CMS session and redirects to a 1px tracking gif, echoing
 * back the caller's cache-busting "rnd" value.
 */
require_once(dirname(__FILE__).'/protect.inc.php');
if ($rt = @ include_once('config.inc.php')) {
	// Keep it alive
	startCMSSession();
	// intval() both sanitizes the redirect parameter and (together with the
	// isset() guard) avoids an E_NOTICE when 'rnd' is absent.
	$rnd = isset($_REQUEST['rnd']) ? intval($_REQUEST['rnd']) : 0;
	header('Location: ' . MODX_BASE_URL . 'manager/media/script/_session.gif?rnd=' . $rnd);
}
<file_sep>/manager/media/script/session.js
/*
 * Small script to keep session alive in MODx
 */
/*
function keepSessionAlive() {
	var img = new Image();
	img.src = "includes/session_keepalive.php?rnd=" + new Date().getTime();
	window.setTimeout('keepSessionAlive();', 1000 * 60);
}
keepSessionAlive();
*/
// Re-request the keep-alive image inside the mainMenu frame once a minute.
// Replacing the query string busts the browser cache so the server-side
// session handler actually runs on every request.
function keepMeAlive(imgName) {
	var myImg = self.mainMenu.document.getElementById(imgName); // BUGFIX: 'var' added — was an implicit global
	if (myImg) myImg.src = myImg.src.replace(/\?.*$/, '?' + Math.random());
}
window.setInterval("keepMeAlive('keepAliveIMG')", 1000 * 60);
<?php
//TinyMCE RichText Editor Plugin v3.2.4.1
// getTinyMCESettings function
if (!function_exists('getTinyMCESettings')) {
	/**
	 * Render the TinyMCE settings panel (an HTML table) shown in the MODx
	 * manager on the user/web-user edit pages and the configuration page.
	 *
	 * @param array  $_lang            manager language strings (keys tinymce_*)
	 * @param string $path             plugin path used to locate its lang files
	 * @param string $manager_language language file to load (falls back to english)
	 * @param mixed  $use_editor       1 = rows visible ($displayStyle), else hidden
	 * @param string $theme            current theme; legacy value "full" maps to "editor"
	 * @param string $css,$plugins,$buttons1..4  current field values to re-display
	 * @param string $displayStyle     CSS display value for visible rows
	 * @param int    $action           manager action id; 11/12 add a "global" option
	 * @return string                  the settings table HTML
	 */
	function getTinyMCESettings($_lang, $path, $manager_language='english', $use_editor, $theme, $css, $plugins, $buttons1, $buttons2, $buttons3, $buttons4, $displayStyle, $action) {
		// language settings
		if (! @include_once($path.'/lang/'.$manager_language.'.inc.php')){
			include_once($path.'/lang/english.inc.php');
		}
		// Check for previous 'full' theme setting for backwards compatibility
		if($theme == "full"){
			$theme = "editor"; // BUGFIX: was '==' (a no-op comparison), so 'full' was never remapped
		}
		$themeOptions = ''; // BUGFIX: was appended to without initialization (E_NOTICE)
		if($action == 11 || $action == 12){
			$themeOptions .= " <option value=\"\">".$_lang['tinymce_theme_global_settings']."</option>\n";
		}
		$arrThemes = array();
		$arrThemes[] = array("simple",$_lang['tinymce_theme_simple']);
		$arrThemes[] = array("advanced",$_lang['tinymce_theme_advanced']);
		$arrThemes[] = array("editor",$_lang['tinymce_theme_editor']);
		$arrThemes[] = array("custom",$_lang['tinymce_theme_custom']);
		$arrThemesCount = count($arrThemes);
		for ($i=0;$i<$arrThemesCount;$i++) {
			$themeOptions .= " <option value=\"".$arrThemes[$i][0]."\"".($arrThemes[$i][0] == $theme ? " selected=\"selected\"" : "").">".$arrThemes[$i][1]."</option>\n";
		}
		$display = $use_editor==1 ? $displayStyle : 'none';
		$css = isset($css) ? htmlspecialchars($css) : "";
		return <<<TINYMCE_HTML
<table id='editorRow_TinyMCE' style="width:inherit;" border="0" cellspacing="0" cellpadding="3">
<tr class='row1' style="display: $display;">
<td colspan="2" class="warning" style="color:#707070; background-color:#eeeeee"><h4>{$_lang["tinymce_settings"]}</h4></td>
</tr>
<tr class='row1' style="display: $display">
<td nowrap class="warning"><b>{$_lang["tinymce_editor_theme_title"]}</b></td>
<td>
<select name="tinymce_editor_theme">
{$themeOptions}
</select>
</td>
</tr>
<tr class='row1' style="display: $display">
<td width="200"> </td>
<td class='comment'>{$_lang["tinymce_editor_theme_message"]}</td>
</tr>
<tr class='row1' style="display: $display">
<td colspan="2"><div class='split'></div></td>
</tr>
<tr class='row1' style="display:$display;">
<td nowrap class="warning"><b>{$_lang["tinymce_editor_custom_plugins_title"]}</b></td>
<td><input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_custom_plugins" value="$plugins" />
</td>
</tr>
<tr class='row1' style="display: $display;">
<td width="200"> </td>
<td class='comment'>{$_lang["tinymce_editor_custom_plugins_message"]}</td>
</tr>
<tr class='row1' style="display: $display">
<td colspan="2"><div class='split'></div></td>
</tr>
<tr class='row1' style="display:$display;">
<td nowrap class="warning" valign="top"><b>{$_lang["tinymce_editor_custom_buttons_title"]}</b></td>
<td>
Row 1: <input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_custom_buttons1" value="$buttons1" /><br/>
Row 2: <input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_custom_buttons2" value="$buttons2" /><br/>
Row 3: <input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_custom_buttons3" value="$buttons3" /><br/>
Row 4: <input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_custom_buttons4" value="$buttons4" />
</td>
</tr>
<tr class='row1' style="display: $display;">
<td width="200"> </td>
<td class='comment'>{$_lang["tinymce_editor_custom_buttons_message"]}</td>
</tr>
<tr class='row1' style="display: $display">
<td colspan="2"><div class='split'></div></td>
</tr>
<tr class='row1' style="display:$display;">
<td nowrap class="warning"><b>{$_lang["tinymce_editor_css_selectors_title"]}</b></td>
<td><input onChange="documentDirty=true;" type='text' maxlength='65000' style="width: 300px;" name="tinymce_css_selectors" value="$css" />
</td>
</tr>
<tr class='row1' style="display: $display;">
<td width="200"> </td>
<td class='comment'>{$_lang["tinymce_editor_css_selectors_message"]}</td>
</tr>
</table>
TINYMCE_HTML;
	}
}
// getTinyMCEScript function
if (!function_exists('getTinyMCEScript')) {
	/**
	 * Build the complete <script> fragment that loads TinyMCE and calls
	 * tinyMCE.init() for the given element list.
	 *
	 * The "editor"/"custom"/"full" themes map to TinyMCE's "advanced" theme
	 * with a preset plugin/button set; other values are passed through.
	 * $pathoptions selects how TinyMCE rewrites URLs (root-relative,
	 * document-relative, full path, or the document-relative default).
	 *
	 * @return string HTML/JS fragment ready to be echoed into the page
	 */
	function getTinyMCEScript($elmList, $theme='simple', $width, $height, $language='en', $frontend, $base_url, $plugins, $buttons1, $buttons2, $buttons3, $buttons4, $disabledButtons, $blockFormats, $entity_encoding, $entities, $pathoptions, $cleanup, $resizing, $css_path, $css_selectors, $use_browser, $toolbar_align, $advimage_styles, $advlink_styles, $linklist, $customparams, $site_url, $tinyURL, $webuser) {
		// BUGFIX: initialise every conditionally-assigned accumulator up
		// front so the many ".=" appends below never touch an undefined
		// variable (E_NOTICE). The generated output is unchanged: the
		// previous code relied on null behaving like ''/false here.
		$tinymceInit = '';
		$document_base_url = '';
		$tinyCallback = '';
		$convert_urls = false; // only the *relative path modes switch this on
		// Set theme
		if($theme == "editor" || $theme == "custom" || $theme == "full"){
			$tinyTheme = "advanced";
			if(($theme == "editor" || $theme == "full") || ($theme == "custom" && (empty($plugins) || empty($buttons1)))){
				$blockFormats = "p,h1,h2,h3,h4,h5,h6,div,blockquote,code,pre,address";
				$plugins = "style,advimage,advlink,searchreplace,print,contextmenu,paste,fullscreen,nonbreaking,xhtmlxtras,visualchars,media";
				$buttons1 = "undo,redo,selectall,separator,pastetext,pasteword,separator,search,replace,separator,nonbreaking,hr,charmap,separator,image,link,unlink,anchor,media,separator,cleanup,removeformat,separator,fullscreen,print,code,help";
				$buttons2 = "bold,italic,underline,strikethrough,sub,sup,separator,blockquote,separator,bullist,numlist,outdent,indent,separator,justifyleft,justifycenter,justifyright,justifyfull,separator,styleselect,formatselect,separator,styleprops";
				$buttons3 = "";
				$buttons4 = "";
			}
		} else {
			$tinyTheme = $theme;
		}
		// Set relative URL options
		switch($pathoptions){
			case "rootrelative":
				$relative_urls = "false";
				$convert_urls = true;
				$remove_script_host = "true";
				$document_base_url = " document_base_url : \"".$site_url."\",\n";
				break;
			case "docrelative":
				$relative_urls = "true";
				$convert_urls = true;
				$document_base_url = " document_base_url : \"".$site_url."\",\n";
				$remove_script_host = "true";
				break;
			case "fullpathurl":
				$relative_urls = "false";
				$remove_script_host = "false";
				break;
			default:
				$relative_urls = "true";
				$document_base_url = " document_base_url : \"".$site_url."\",\n";
				$remove_script_host = "true";
		}
		$cssPath = !empty($css_path) ? " content_css : \"".$css_path."\",\n" : "";
		$cssSelector = !empty($css_selectors) ? " theme_advanced_styles : \"".$css_selectors."\",\n" : "";
		$elmList = !empty($elmList) ? " elements : \"".$elmList."\",\n" : "";
		// Build init options
		$tinymceInit .= " theme : \"".$tinyTheme."\",\n";
		$tinymceInit .= " mode : \"exact\",\n";
		$tinymceInit .= $width ? " width : \"".str_replace("px", "", $width)."\",\n" : "";
		$tinymceInit .= $height ? " height : \"".str_replace("px", "", $height)."\",\n" : "";
		$tinymceInit .= " relative_urls : ".$relative_urls.",\n";
		$tinymceInit .= $document_base_url;
		$tinymceInit .= " remove_script_host : ".$remove_script_host.",\n";
		$tinymceInit .= $convert_urls == false ? " convert_urls : false,\n":"";
		$tinymceInit .= " language : \"".$language."\",\n";
		$tinymceInit .= $elmList;
		$tinymceInit .= " valid_elements : tinymce_valid_elements,\n";
		$tinymceInit .= " extended_valid_elements : tinymce_extended_valid_elements,\n";
		$tinymceInit .= " invalid_elements : tinymce_invalid_elements,\n";
		$tinymceInit .= $cssPath;
		$tinymceInit .= " entity_encoding : \"".$entity_encoding."\",\n";
		$tinymceInit .= ($entity_encoding == "named" && !empty($entities)) ? " entities : \"".$entities."\",\n" :"";
		$tinymceInit .= " cleanup: ".(($cleanup == "enabled" || empty($cleanup)) ? "true" : "false").",\n";
		$tinymceInit .= " apply_source_formatting : true,\n";
		$tinymceInit .= " remove_linebreaks : false,\n";
		$tinymceInit .= " convert_fonts_to_spans : \"true\",\n";
		// Advanced options (only for the "advanced"-based themes)
		if($theme == "editor" || $theme == "custom"){
			// The file browser is only wired up in the manager, or for
			// authenticated web users on the front end.
			if($frontend=='false' || ($frontend=='true' && $webuser)){
				$tinymceInit .= ($use_browser==1 ? " file_browser_callback : \"myFileBrowser\",\n":"");
				$tinyCallback = <<<TINY_CALLBACK
function myFileBrowser (field_name, url, type, win) {
	if (type == 'media') {type = win.document.getElementById('media_type').value;}
	var cmsURL = '{$base_url}manager/media/browser/mcpuk/browser.php?Connector={$base_url}manager/media/browser/mcpuk/connectors/php/connector.php&ServerPath={$base_url}&editor=tinymce3&editorpath={$tinyURL}'; // script URL - use an absolute path!
	switch (type) {
		case "image":
			type = 'images';
			break;
		case "media":
		case "qt":
		case "wmp":
		case "rmp":
			type = 'media';
			break;
		case "shockwave":
		case "flash":
			type = 'flash';
			break;
		case "file":
			type = 'files';
			break;
		default:
			return false;
	}
	if (cmsURL.indexOf("?") < 0) {
		//add the type as the only query parameter
		cmsURL = cmsURL + "?type=" + type;
	}
	else {
		//add the type as an additional query parameter
		// (PHP session ID is now included if there is one at all)
		cmsURL = cmsURL + "&type=" + type;
	}
	var windowManager = tinyMCE.activeEditor.windowManager.open({
		file : cmsURL,
		width : screen.width * 0.7, // Your dimensions may differ - toy around with them!
		height : screen.height * 0.7,
		resizable : "yes",
		inline : "yes", // This parameter only has an effect if you use the inlinepopups plugin!
		close_previous : "no"
	}, {
		window : win,
		input : field_name
	});
	if (window.focus) {windowManager.focus()}
	return false;
}
TINY_CALLBACK;
			}
			if($frontend=='false'){
				$tinymceInit .= ($linklist == 'enabled') ? " external_link_list_url : \"".$tinyURL."/tinymce.linklist.php\",\n" : "";
			}
			if(isset($blockFormats)){$tinymceInit .= " theme_advanced_blockformats : \"".$blockFormats."\",\n";}
			$tinymceInit .= $cssSelector;
			$tinymceInit .= " plugins : \"".$plugins."\",\n";
			$tinymceInit .= " theme_advanced_buttons0 : \"\",\n";
			$tinymceInit .= " theme_advanced_buttons1 : \"".$buttons1."\",\n";
			$tinymceInit .= " theme_advanced_buttons2 : \"".$buttons2."\",\n";
			$tinymceInit .= " theme_advanced_buttons3 : \"".$buttons3."\",\n";
			$tinymceInit .= " theme_advanced_buttons4 : \"".$buttons4."\",\n";
			$tinymceInit .= " theme_advanced_toolbar_location : \"top\",\n";
			$tinymceInit .= " theme_advanced_toolbar_align : \"".($toolbar_align =="rtl" ? "right" : "left")."\",\n";
			$tinymceInit .= " theme_advanced_path_location : \"bottom\",\n";
			$tinymceInit .= " theme_advanced_disable : \"".$disabledButtons."\",\n";
			$tinymceInit .= " theme_advanced_resizing : ".(!empty($resizing) ? $resizing : "false").",\n";
			$tinymceInit .= " theme_advanced_resize_horizontal : false,\n";
			$tinymceInit .= (!empty($advimage_styles) ? " advimage_styles : \"".$advimage_styles."\",\n" : "");
			$tinymceInit .= (!empty($advlink_styles) ? " advlink_styles : \"".$advlink_styles."\",\n" : "");
			$tinymceInit .= " plugin_insertdate_dateFormat : \"%Y-%m-%d\",\n";
			$tinymceInit .= " plugin_insertdate_timeFormat : \"%H:%M:%S\",\n";
			if(!empty($customparams)){
				// $customparams is a comma-separated list of extra "key : value" init options
				$params = explode(",",$customparams);
				$paramsCount = count($params);
				for ($i=0;$i<$paramsCount;$i++) {
					if(!empty($params[$i])){
						$tinymceInit .= " ".trim($params[$i]).",\n";
					}
				}
			}
		}
		if($frontend=='false'){
			$tinymceInit .= " onchange_callback : \"myCustomOnChangeHandler\",\n";
		}
		$tinymceInit .= " button_tile_map : false \n";
		$script = <<<TINY_SCRIPT
<script language="javascript" type="text/javascript" src="{$tinyURL}/jscripts/tiny_mce/tiny_mce.js"></script>
<script language="javascript" type="text/javascript" src="{$tinyURL}/xconfig.js"></script>
<script language="javascript" type="text/javascript">
tinyMCE.init({
{$tinymceInit}
});
{$tinyCallback}
function myCustomOnChangeHandler() {
	documentDirty = true;
}
</script>
TINY_SCRIPT;
		return $script;
	}
}
}
?><file_sep>/manager/includes/support.inc.php
<?php
/**
 * Developer Support Functions
 * Customize these functions to add support for your clients
 * and save the customized file as overide.support.inc.php
 *
 * The system will first look for the override.support.inc.php file.
 * If it's not found then it will use support.inc.php
 *
 */
// Show support link on Manager Login page.
// Emits the "Supported By" box directly into the login page markup;
// edit the HTML between the PHP tags to brand it for a client.
function showSupportLink(){
?>
<!-- Here you can add your own support information and website -->
<div style="color:#808080">
<p>Supported By:</p>
<p style="text-align:center"><a href="http://www.modxcms.com/forums/" target="_blank"><strong>The MODx <br />Community</strong></a></p>
</div>
<?php
}
// sends an mail to support site
// Stub — not implemented. Intended to forward $message with $subject
// from $sender to the developer's support address.
function mailSupport($sender,$subject,$message){
// to-do:
}
// checks support site for updates
// Stub — not implemented. Intended to query the support site for newer releases.
function checkForUpdates() {
// to-do:
}
?>
In the next release:
Finish PHx truncate extension
Conditional Templating
Optimize getKeywords
Allow config to load extenders
Documentation Changes TODO:
caseSensitive needs to be added
&yearSortDir and &monthSortDir in Reflect
Fix givenTags = array in debug
Check filtering with tv prefix...
check, first, last, etc. templates
|
75e915054706b7d74bd0170753d898b305467a6c
|
[
"JavaScript",
"SQL",
"Text",
"PHP"
] | 21 |
SQL
|
jgrau/MODx-CMS
|
c0107a83a8c073ac62f9e4ec1751160665a153d7
|
eff9c95b34dd168e09742561436242e8b7465411
|
refs/heads/master
|
<file_sep>package sample;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.TextArea;
/**
 * JavaFX controller for the pizza-ordering form.
 *
 * Fixes over the previous version:
 * - Topping fields were only ever set to "Single" and never reset, so a
 *   topping that was selected and then deselected was still charged.
 *   Toppings are now derived from the live checkbox state in savePizza().
 * - Currency was rendered as String.valueOf(cost) + "0" (e.g. "$12.250"
 *   for 12.25) and the running total was recovered by re-parsing the
 *   TextArea text. The total is now tracked in a field and all amounts
 *   are formatted with String.format("%.2f").
 */
public class Controller {

    @FXML
    private ToggleGroup Size;

    @FXML
    private Button saveOrder;

    @FXML
    private Button checkPrice;

    @FXML
    private CheckBox GreenPepper;

    @FXML
    private ToggleGroup Cheese;

    @FXML
    private TextArea orderSummary;

    @FXML
    private ChoiceBox<Integer> numPizzas;

    @FXML
    private CheckBox Ham;

    @FXML
    private CheckBox Pineapple;

    @FXML
    private Label pizzaCost;

    // toppings (amount strings passed to the Pizza constructor)
    String hamTopping;
    String greenPepperTopping;
    String pineappleTopping;

    // size, cheese, quantity
    String sizeChoice;
    String cheeseChoice;
    int quantityChoice;

    // Running order state; replaces re-parsing the TextArea text.
    private double runningTotal = 0.0;
    private final StringBuilder orderLog = new StringBuilder();

    // Quantity choices 1..100, filled in initialize().
    private ObservableList<Integer> quantityOption = FXCollections.observableArrayList();

    @FXML
    // Pineapple and green pepper are only allowed on top of ham:
    // deselecting ham clears both of them.
    private void exceptionHam() {
        Ham.setAllowIndeterminate(false);
        if (!Ham.isSelected()) {
            Pineapple.setSelected(false);
            GreenPepper.setSelected(false);
        }
    }

    @FXML
    private void exceptionPineapple() {
        // Pineapple requires ham; undo the selection if ham is absent.
        if (Pineapple.isSelected() && !Ham.isSelected()) {
            Pineapple.setSelected(false);
        }
    }

    @FXML
    private void exceptionGreenPepper() {
        // Green pepper requires ham; undo the selection if ham is absent.
        if (GreenPepper.isSelected() && !Ham.isSelected()) {
            GreenPepper.setSelected(false);
        }
    }

    @FXML
    private void initialize() {
        for (int quantity = 1; quantity <= 100; quantity++) {
            quantityOption.add(quantity);
        }
        numPizzas.setItems(quantityOption);
        numPizzas.setValue(1);
    }

    /**
     * Builds a LineItem from the current control states.
     * Used by both the "check price" and "save order" buttons.
     *
     * @throws IllegalPizza if the selected combination is invalid
     */
    private LineItem savePizza() throws IllegalPizza {
        sizeChoice = ((RadioButton) Size.getSelectedToggle()).getText();
        cheeseChoice = ((RadioButton) Cheese.getSelectedToggle()).getText();
        quantityChoice = numPizzas.getValue();
        // Derive toppings from the live checkbox state so a deselected
        // topping is correctly reported as "None".
        hamTopping = Ham.isSelected() ? "Single" : "None";
        pineappleTopping = Pineapple.isSelected() ? "Single" : "None";
        greenPepperTopping = GreenPepper.isSelected() ? "Single" : "None";
        Pizza pizzaOrder = new Pizza(sizeChoice, cheeseChoice, pineappleTopping, greenPepperTopping, hamTopping);
        return new LineItem(quantityChoice, pizzaOrder);
    }

    @FXML
    private void pressCheckPrice(ActionEvent checkPrice) throws IllegalPizza {
        LineItem lineItemObject = savePizza();
        // Always show exactly two decimals.
        pizzaCost.setText(String.format("$%.2f", lineItemObject.getCost()));
    }

    @FXML
    private void orderSavedClicked() throws IllegalPizza {
        LineItem lineItemObject = savePizza();
        // Accumulate the total in a field instead of parsing it back out
        // of the displayed text, then re-render the whole summary.
        runningTotal += lineItemObject.getCost();
        orderLog.append(lineItemObject).append('\n');
        orderSummary.setText(orderLog + String.format("Total Cost: $%.2f%n", runningTotal));
    }
}
|
87492fdcb904aa32187bbbb9f32689a1172a1891
|
[
"Java"
] | 1 |
Java
|
Slabrosse/PizzaOrderingSystem
|
700601c7c2c2b32655cbfc592819b3ae171d109c
|
73a8c11f5a2e6745527ad13fa504520d89b16345
|
refs/heads/master
|
<repo_name>ImperialCollegeLondon/dcw-splittingintowords<file_sep>/c-versions/set.c
/*
* set.c: "set of strings" storage for C..
* we store a set as a reduced (no values) hash table,
* hashing each set member (key) into a dynamic array of
* binary search trees, and then search/include/exclude
* the key in/from the corresponding search tree. The
* set also stores a key print function pointer so that
* the set members can be complex data structures printed
* appropriately. We handle exclusion of a member from
* a set by marking the key as not "in" the set.
*
* (C) <NAME>, 1996-2017 although it seems longer:-)
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include "set.h"
#define NHASH 32533	/* number of hash buckets in the table */

typedef struct tree_s *tree;

/* A set: a fixed-size hash table mapping each bucket to a binary search
 * tree of members, plus the member print function used by setDump(). */
struct set_s {
	tree * data;		/* dynamic array of trees, NHASH entries */
	set_printfunc p;	/* how to print one member (may be NULL) */
};

/* One node of a member tree. */
struct tree_s {
	set_key k;		/* Key, i.e. set member */
	int in;			/* is member included? (exclusion clears this flag) */
	tree left;		/* Left... */
	tree right;		/* ... and Right trees */
};

/*
 * operation selector for tree_op(): look up, insert/mark-in, or mark-out.
 */
typedef enum { Search, Define, Exclude } tree_operation;

/* Private functions */
static void foreach_tree( tree, set_foreachcbfunc, void * );
static void dump_cb( set_key, void * );
static void include_cb( set_key, void * );
static void exclude_cb( set_key, void * );
static void exclude_if_notin_cb( set_key, void *);
static void diff_cb( set_key, void *);
static void count_cb( set_key, void *);
static tree copy_tree( tree );
static void free_tree( tree );
static int depth_tree( tree );
static tree talloc( set_key );
static tree tree_op( set, set_key, tree_operation );
static int shash( char * );
/*
 * setCreate: allocate a new, empty set whose every hash bucket holds an
 * empty tree; the print function p is remembered for use by setDump().
 */
set setCreate( set_printfunc p )
{
	set newset;
	int bucket;

	newset = (set) malloc( sizeof(struct set_s) );
	newset->data = (tree *) malloc( NHASH*sizeof(tree) );
	newset->p = p;
	for( bucket = 0; bucket < NHASH; bucket++ )
	{
		newset->data[bucket] = NULL;
	}
	return newset;
}
/*
 * setEmpty: discard every member of s, leaving the (still usable)
 * skeleton of the set in place.
 */
void setEmpty( set s )
{
	int bucket;

	for( bucket = 0; bucket < NHASH; bucket++ )
	{
		tree t = s->data[bucket];
		if( t != NULL )
		{
			free_tree( t );
			s->data[bucket] = NULL;
		}
	}
}
/*
 * setCopy: build and return a deep copy of s (every bucket's tree is
 * duplicated; the print function is shared).
 */
set setCopy( set s )
{
	set duplicate;
	int bucket;

	duplicate = (set) malloc( sizeof(struct set_s) );
	duplicate->data = (tree *) malloc( NHASH*sizeof(tree) );
	duplicate->p = s->p;
	for( bucket = 0; bucket < NHASH; bucket++ )
	{
		duplicate->data[bucket] = copy_tree( s->data[bucket] );
	}
	return duplicate;
}
/*
 * setFree: release every member tree, then the bucket array, then the
 * set structure itself. s must not be used afterwards.
 */
void setFree( set s )
{
	int bucket;

	for( bucket = 0; bucket < NHASH; bucket++ )
	{
		tree t = s->data[bucket];
		if( t != NULL )
		{
			free_tree( t );
		}
	}
	free( (void *) s->data );
	free( (void *) s );
}
/*
 * Set metrics:
 * calculate the min, max and average depth of all non-empty trees.
 * Sadly can't do this with a setForeach unless the depth is magically
 * passed into the callback.
 * If the set has no non-empty trees at all, min, max and avg are all
 * reported as zero (previously this divided by zero and left min/max
 * at their huge sentinel values).
 */
void setMetrics( set s, int *min, int *max, double *avg )
{
	int i;
	int nonempty = 0;
	int total = 0;
	*min = 100000000;
	*max = -100000000;
	for( i = 0; i < NHASH; i++ ) {
		if( s->data[i] != NULL )
		{
			int d = depth_tree( s->data[i] );
			if( d < *min ) *min = d;
			if( d > *max ) *max = d;
			total += d;
			nonempty++;
		}
	}
	if( nonempty == 0 )
	{
		/* empty set: no depths to aggregate */
		*min = 0;
		*max = 0;
		*avg = 0.0;
	} else
	{
		*avg = ((double)total)/(double)nonempty;
	}
}
/*
 * setInclude: add item to set s.
 * tree_op(Define) creates the node if necessary and marks it "in";
 * including an already-present member is a harmless no-op.
 */
void setInclude( set s, set_key item )
{
	(void) tree_op( s, item, Define);
}
/*
 * setExclude: remove item from set s.
 * The node (if any) stays in the tree; tree_op(Exclude) just clears its
 * "in" flag, so excluding an absent member is a harmless no-op.
 */
void setExclude( set s, set_key item )
{
	(void) tree_op( s, item, Exclude);
}
/*
 * Convenience function:
 * Given a changes string of the form "[+-]item[+-]item[+-]item..."
 * modify the given set s, including (+) or excluding (-) items.
 * An empty changes string is a no-op; a string not starting with
 * '+' or '-' trips the assert below.
 * NB: This assumes that key == char *..
 */
void setModify( set s, set_key changes )
{
	/* NOTE(review): strdup() result is not checked for NULL (OOM). */
	char *str = strdup( changes ); /* so we can modify it! */
	char *p = str;
	char cmd = *p;
	while( cmd != '\0' ) /* while not finished */
	{
		assert( cmd == '+' || cmd == '-' );
		p++;
		/* got a string of the form... [+-]itemstring[+-\0]... */
		/* cmd = the + or - command */
		/* and p points at the first char ^p */
		/* find the next +- command, ^q */
		char *q = p;
		for( ; *q != '\0' && *q != '+' && *q != '-'; q++ );
		/* terminate itemstring here, remembering the next cmd */
		char nextcmd = *q;
		*q = '\0';
		/* now actually include/exclude the item from the set
		 * (the item text is the NUL-terminated span starting at p) */
		if( cmd == '+' )
		{
			setInclude( s, p );
		} else
		{
			setExclude( s, p );
		}
		/* set up for next time */
		cmd = nextcmd; /* the next command */
		p = q; /* the next item */
	}
	free( (void *)str );
}
/*
* Look for an item in the set s
*/
/*
 * Membership test: 1 iff item is present (and not excluded) in s.
 */
int setIn( set s, set_key item )
{
    tree node = tree_op( s, item, Search );
    if( node == NULL )
    {
        return 0;
    }
    return node->in;
}
/*
* perform a foreach operation over a given set
* call a given callback for each item pair.
*/
/*
 * Foreach over a set: invoke cb( key, arg ) once per included member,
 * walking each non-empty bucket's tree in order.
 */
void setForeach( set s, set_foreachcbfunc cb, void * arg )
{
    int slot;
    for( slot = 0; slot < NHASH; slot++ )
    {
        tree t = s->data[slot];
        if( t != NULL )
        {
            foreach_tree( t, cb, arg );
        }
    }
}
/* ----------- Higher level operations using setForeach -------------- */
/* - each using it's own callback, sometimes with a custom structure - */
/*
* setDump: Display a set to a file.
* Here, we need to know where (FILE *out) and how (printfunc p) to
* display each item of the set.
*/
/* Per-item state for dump_cb: where (out) and how (p) to print a key. */
typedef struct { FILE *out; set_printfunc p; } dumparg;
/* setForeach callback: print one key via the user printfunc when one
 * was given to setCreate, otherwise as "%s," */
static void dump_cb( set_key k, void * arg )
{
    dumparg *dd = (dumparg *)arg;
    if( dd->p != NULL )
    {
        (*(dd->p))( dd->out, k );
    } else
    {
        fprintf( dd->out, "%s,", k );
    }
}
/* Display the whole set to out as "{ item,item,... }" */
void setDump( FILE *out, set s )
{
    dumparg arg; arg.out = out; arg.p = s->p;
    fputs("{ ",out);
    setForeach( s, &dump_cb, (void *)&arg );
    fputs(" }",out);
}
/*
* setUnion: a += b
* include each item of b into a
*/
/* setForeach callback: include key k in the set passed as arg. */
static void include_cb( set_key k, void *s )
{
    setInclude( (set)s, k );
}
/* Set union, a += b: include every member of b into a. */
void setUnion( set a, set b )
{
    setForeach( b, &include_cb, (void *)a );
}
/*
* Set subtraction, a -= b
* exclude each item of b from a
*/
/* setForeach callback: exclude key k from the set passed as arg. */
static void exclude_cb( set_key k, void *s )
{
    setExclude( (set)s, k );
}
/* Set subtraction, a -= b: exclude every member of b from a. */
void setSubtraction( set a, set b )
{
    setForeach( b, &exclude_cb, (void *)a );
}
/*
* Set intersection, a = a&b
* exclude each member of a FROM a UNLESS in b too
* here we need to pass both sets to the callback,
* via this "pair of sets" structure:
*/
/* A pair of sets, so a callback can see both operands. */
typedef struct { set a, b; } setpair;
/* setForeach callback: drop k from a unless it is also in b. */
static void exclude_if_notin_cb( set_key k, void *arg )
{
    setpair *d = (setpair *)arg;
    if( ! setIn(d->b, k) )
    {
        setExclude( d->a, k );
    }
}
/* Set intersection, a = a&b: keep in a only members also in b. */
void setIntersection( set a, set b )
{
    setpair data; data.a = a; data.b = b;
    setForeach( a, &exclude_if_notin_cb, (void *)&data );
}
/*
* Set difference, simultaneous a -= b and b -= a
* exclude each item of both sets from both sets, LEAVING
* - a containing elements ONLY in a, and
* - b containing elements ONLY in b.
*/
/* setForeach callback: if k is in both sets, remove it from both. */
static void diff_cb( set_key k, void *arg )
{
    setpair *d = (setpair *)arg;
    if( setIn(d->a, k) )
    {
        setExclude( d->a, k );
        setExclude( d->b, k );
    }
}
/*
 * Simultaneous a -= b and b -= a: after the call, a holds elements
 * ONLY in a and b holds elements ONLY in b (symmetric difference,
 * split across the two sets).
 */
void setDiff( set a, set b )
{
    setpair data; data.a = a; data.b = b;
    setForeach( b, &diff_cb, (void *)&data );
}
/*
* Set members: how many members in the set?
*/
/* setForeach callback: bump the int counter passed as arg. */
static void count_cb( set_key k, void *arg )
{
    int *n = (int *)arg;
    (*n)++;
}
/* Cardinality: number of (included) members in the set. */
int setMembers( set s )
{
    int n = 0;
    setForeach( s, &count_cb, (void *)&n );
    return n;
}
/*
* Set isEmpty: is the set empty?
*/
/*
 * Emptiness test: 1 iff the set has no (included) members.
 */
int setIsEmpty( set s )
{
    return setMembers( s ) == 0;
}
/* -------------------- Binary search tree ops --------------------- */
/*
* Allocate a new node in the tree
*/
/*
 * Allocate a new leaf node holding a private strdup'd copy of key k,
 * marked included (in = 1).  Exits the program on malloc failure.
 */
static tree talloc( set_key k )
{
    tree p = (tree) malloc(sizeof(struct tree_s));
    if( p == NULL )
    {
        fprintf( stderr, "talloc: No space left\n" );
        exit(1);
    }
    p->left = p->right = NULL;
    p->k = strdup(k); /* Save key */
    /* NOTE(review): strdup() result is not NULL-checked here. */
    p->in = 1; /* Include it */
    return p;
}
/*
* Operate on the Binary Search Tree
* Search, Define, Exclude.
*/
/*
 * Operate on the BST in the bucket that k hashes to:
 *   Search  - return k's node if present AND included, else NULL
 *   Define  - insert k (or re-include an excluded node), return node
 *   Exclude - mark k's node as excluded (lazy delete), return node
 */
static tree tree_op( set s, set_key k, tree_operation op )
{
    tree ptr;
    /* aptr tracks the link we would rewrite when inserting */
    tree * aptr = s->data + shash(k);
    while( (ptr = *aptr) != NULL )
    {
        int rc = strcmp(ptr->k, k);
        if( rc == 0 )
        {
            if( op == Define )
            {
                ptr->in = 1;
            } else if( op == Exclude )
            {
                ptr->in = 0;
            } else if( ! ptr->in )
            {
                /* Search hit an excluded node: report "not present" */
                return NULL;
            }
            return ptr;
        }
        if (rc < 0)
        {
            /* less - left */
            aptr = &(ptr->left);
        } else
        {
            /* more - right */
            aptr = &(ptr->right);
        }
    }
    if (op == Define )
    {
        return *aptr = talloc(k); /* Alloc new node */
    }
    return NULL; /* not found */
}
/*
* Copy one tree
*/
/*
 * Deep-copy one tree, preserving each node's included/excluded flag.
 * Returns NULL for an empty tree.
 */
static tree copy_tree( tree t )
{
    if( t == NULL )
    {
        return NULL;
    }
    tree result = talloc( t->k );
    result->in = t->in;
    result->left = copy_tree( t->left );
    result->right = copy_tree( t->right );
    return result;
}
/*
* foreach one tree
*/
/*
 * In-order walk of one tree, calling f( key, arg ) for every node
 * that is currently included.  Safe on an empty (NULL) tree.
 */
static void foreach_tree( tree t, set_foreachcbfunc f, void * arg )
{
    assert( f != NULL );
    if( t == NULL )
    {
        return;
    }
    foreach_tree( t->left, f, arg );
    if( t->in )
    {
        (*f)( t->k, arg );
    }
    foreach_tree( t->right, f, arg );
}
/*
* Free one tree
*/
/*
 * Free one tree: both subtrees, each node's strdup'd key, then the
 * node itself.  Safe on an empty (NULL) tree.
 */
static void free_tree( tree t )
{
    if( t == NULL )
    {
        return;
    }
    free_tree( t->left );
    free_tree( t->right );
    free( (void *) t->k );
    free( (void *) t );
}
/*
* Compute the depth of a given tree
*/
/* max() helper used only by depth_tree */
#define max(a,b) ((a)>(b)?(a):(b))
/* Depth of one tree: 0 for NULL, else 1 + the deeper subtree. */
static int depth_tree( tree t )
{
    if( t )
    {
        int d2 = depth_tree( t->left );
        int d3 = depth_tree( t->right );
        return 1 + max(d2,d3);
    }
    return 0;
}
/*
* Calculate hash on a string
*/
/*
 * String hash: classic multiply-by-65599 rolling hash over the bytes
 * of str, reduced modulo the bucket count NHASH.
 */
static int shash( char *str )
{
    unsigned int hh = 0;
    unsigned char ch;
    while( (ch = (unsigned char)*str++) != '\0' )
    {
        hh = hh * 65599 + ch;
    }
    return hh % NHASH;
}
<file_sep>/c-versions/Makefile
# Historical paths to a personal c-tools installation; set.c/set.h are
# now compiled locally, so these lines stay commented out for reference.
#TOOLS = $(HOME)/c-tools
#LIBDIR = $(TOOLS)/lib/$(ARCH)
#INCDIR = $(TOOLS)/include
#CFLAGS = -I. -I$(INCDIR) -Wall -g
#LDLIBS = -L$(LIBDIR) -lset
CFLAGS = -Wall -g
LDLIBS =
CC = gcc
# Build both word-splitting solvers.
all: findlongest backtrack
findlongest: findlongest.o set.o
	$(CC) -o findlongest $(LDLIBS) findlongest.o set.o
backtrack: backtrack.o set.o
	$(CC) -o backtrack $(LDLIBS) backtrack.o set.o
# Remove binaries and intermediate objects.
clean:
	/bin/rm -f findlongest backtrack *.o core a.out
<file_sep>/c-versions/set.h
/*
 * set.h: set (based on hash) storage for C..
 *
 * (C) <NAME>, 1996-2017 although it seems longer:-)
 */
/* opaque set handle; keys are plain C strings */
typedef struct set_s *set;
typedef char *set_key;
/* user-supplied "print one key to this FILE" function */
typedef void (*set_printfunc)( FILE *, set_key );
/* setForeach callback: called once per member with a user argument */
typedef void (*set_foreachcbfunc)( set_key, void * );
/* lifecycle */
extern set setCreate( set_printfunc p );
extern void setEmpty( set s );
extern set setCopy( set s );
extern void setFree( set s );
/* hash-bucket statistics: min/max/average tree depth */
extern void setMetrics( set s, int * min, int * max, double * avg );
/* membership */
extern void setInclude( set s, set_key item );
extern void setExclude( set s, set_key item );
extern void setModify( set s, set_key changes );
extern int setIn( set s, set_key item );
/* iteration and display */
extern void setForeach( set s, set_foreachcbfunc cb, void * arg );
extern void setDump( FILE * out, set s );
/* whole-set operations */
extern void setUnion( set a, set b );
extern void setSubtraction( set a, set b );
extern void setIntersection( set a, set b );
extern void setDiff( set a, set b );
extern int setMembers( set s );
extern int setIsEmpty( set s );
/*
* backtrack: read a dictionary forming a dictionary set, add some
* extra words from the command line, take a sentence
* WITH NO SPACES, and attempt to parse the sentence as
* a sequence of words. Of course there can be many
* solutions (aka the "loitering with intent" vs
* "loitering within tent" problem).
* Which solution do we find? at each stage, we pick **the
* longest possible prefix such that it's lowercased version
* is a dictionary word**. But if no solution is found
* having picked the longest word, we backtrack and try the
* next shortest word...
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <string.h>
#include <ctype.h>
#include <assert.h>
#include "set.h"
#define min(x,y) ((x)<(y)?(x):(y))
// no single word in the dictionary longer than..
#define MAXWORDLEN 1024
// max number of words in sentence..
#define MAXWORDS 100
typedef char aword[MAXWORDLEN];
typedef char *wordarray[MAXWORDS];
// while splitting, we represent words within the sentence
// as a list of lengths eg given "MostEnglishsentencesaremostlylowercase",
// we'd have 4 (Most), 7 (English), 9 (sentences), 3 (are) etc..
// this saves copying words out of the original sentence all the time
typedef int wordinfo[MAXWORDS];
/*
* alllower( string );
* Lower case the given string, in place.
*/
/*
 * alllower( string );
 * Lower-case the given NUL-terminated string, in place.
 */
void alllower( char *p )
{
    while( *p != '\0' )
    {
        *p = tolower(*p);
        p++;
    }
}
/*
* set worddset = readdict( wordlistfile, extra_words[], int *longest );
* Read a word list <wordlistfile> (and add some extra words contained
* in <extra_words[]>, terminated by NULL), build and return a set
* of all those LOWERCASED words, also setting *longest to the length
* of the longest word in the whole set.
*/
/*
 * Build the dictionary set: lowercase and include every extra word,
 * then every line of wordlistfile, tracking the longest word length
 * in *longest.  Dies (assert) if the file can't be opened or a line
 * exceeds MAXWORDLEN.
 */
set readdict( char *wordlistfile, wordarray extra_words, int *longest )
{
    set dict = setCreate( NULL );
    *longest = 0;
    /* first the extra words supplied on the command line */
    for( char **w = extra_words; *w != NULL; w++ )
    {
        // add lowercased word to dictionary
        alllower( *w );
        setInclude( dict, *w );
        // update longest
        int len = strlen(*w);
        if( len > *longest ) *longest = len;
    }
    // foreach line (word!) in wordlistfile
    FILE *fh = fopen( wordlistfile, "r" );
    assert( fh != NULL );
    aword word;
    while( fgets(word, MAXWORDLEN, fh ) != NULL )
    {
        // remove trailing '\n' - if not present, line too long: die!
        char *last = word + strlen(word) - 1;
        assert( *last == '\n' );
        *last = '\0';
        // add lowercased word to dictionary
        alllower( word );
        setInclude( dict, word );
        // update longest
        int len = strlen(word);
        if( len > *longest ) *longest = len;
    }
    fclose( fh );
    return dict;
}
/*
* convertwords( sentence, nwords, wlen, result );
* Given a <sentence> in original case, the number of words <nwords>,
* and an array <wlen> of word lengths, build <result>, a word array.
 * The storage for <result> must have already been allocated, but not
* the storage for each string (char *).
*/
/*
 * Materialise the word list: copy each length-wlen[i] slice of
 * sentence into one shared malloc'd block, setting result[i] to
 * point at its NUL-terminated copy.
 * NB: the block is not freed here -- result[0] points at its start,
 * and the caller frees it (see main's free( words[0] )).
 */
void convertwords( char *sentence, int nwords, wordinfo wlen, wordarray result )
{
    assert( nwords < MAXWORDS );
    /* whole sentence + one '\0' per word */
    int size = strlen(sentence) + nwords + 1;
    assert( size < MAXWORDLEN );
    char *block = (char *)malloc( size*sizeof(char) );
    assert( block != NULL );
    char *dst = block;
    char *src = sentence;
    for( int i=0; i<nwords; i++ )
    {
        result[i] = dst;
        // copy wlen chars from src into dst..
        int l = wlen[i];
        strncpy( dst, src, l );
        dst[l] = '\0';
        //printf( "debug: word %d is %s\n", i, dst );
        src += l;
        dst += l+1;
    }
}
/*
* int nwords = canbreakwords( lc_str, dict, maxwordlen, wordlen[], nwordssofar );
* Given a lower-case string <lc_str>, a dictionary set <dict>, and the
* length of the longest word in the set <maxwordlen>, try to break
* the original sentence up into an array of word lengths, preferring to
* pick the longest possible prefix that is a word in the dictionary set,
* but backtracking if necessary.
* The array of word lengths is built up in wordlen[], no more than
* MAXWORDS allowed.
* Return the number of words found - or -1 if no breakdown is possible.
*/
/*
 * Recursive backtracking splitter: try the longest dictionary-word
 * prefix of lc_str first; if the remainder can't be split, retry
 * with the next shorter prefix.  Word lengths are recorded in
 * wordlen[]; returns the total word count, or -1 if no split exists.
 * NB: lc_str is temporarily modified in place (a NUL is planted at
 * each candidate length) and always restored before returning.
 */
int canbreakwords( char *lc_str, set dict, int maxwordlen, wordinfo wordlen, int nwordssofar )
{
    for( int wlen = min(maxwordlen,strlen(lc_str)); wlen>0; wlen-- )
    {
        // consider word starting at lc_str, length wlen:
        // is it a dict word?
        // temporarly string terminate lc_str at wlen..
        char ch = lc_str[wlen];
        lc_str[wlen] = '\0';
        // check if lc_str is a word (in dict)?
        bool isword = setIn( dict, lc_str);
        // found a dictionary word?
        if( isword )
        {
            // add wlen to words so far..
            wordlen[nwordssofar] = wlen;
            assert( nwordssofar < MAXWORDS );
            //printf( "debug: cbw: found word %s of length %d\n", lc_str, wlen );
            // change it back
            lc_str[wlen] = ch;
            // have we finished the entire string?
            if( ch == '\0' )
            {
                return nwordssofar+1;
            }
            // try to break the rest..
            int nwords = canbreakwords( lc_str+wlen, dict, maxwordlen, wordlen, nwordssofar+1 );
            if( nwords != -1 )
            {
                return nwords;
            }
        }
        // change it back
        lc_str[wlen] = ch;
    }
    return -1;
}
/*
* int nwords = breakwords( sentence, dict, longestwordlen, words[] );
* Given a <sentence> with no spaces, a dictionary set <dict>, and
* the length of the longest word in the set <longestwordlen>, break
* the original sentence up into an array of words, preferring to pick
* the longest possible prefix that is a word in the dictionary set,
* but backtracking to pick shorter word-prefixes if necessary.
* The array of words is built up in words[], no more than MAXWORDS
* allowed. Each individual word can be no longer than MAXWORDLEN.
* Return the number of words found - or -1 if no breakdown is possible.
*/
/*
 * Top-level splitter: lowercase a private copy of sentence, find a
 * word breakdown via canbreakwords, then extract the words (in their
 * ORIGINAL case) into words[] via convertwords.
 * Returns the word count, or -1 when no breakdown exists (in which
 * case words[] is left untouched).
 */
int breakwords( char *sentence, set dict, int longestwordlen, wordarray words )
{
    assert( strlen(sentence) < MAXWORDLEN );
    aword lc_sentence;
    strcpy( lc_sentence, sentence );
    alllower( lc_sentence );
    wordinfo wordlen;
    int nwords = canbreakwords( lc_sentence, dict, longestwordlen, wordlen, 0 );
    if( nwords == -1 ) return -1;
    // now need to extract the words using wordlen[i] = starting posn
    convertwords( sentence, nwords, wordlen, words );
    return nwords;
}
aword wordlistfile = "/usr/share/dict/words";
char *usage =
"backtrack (''|wordlistfile) sentencewithoutspaces [extra words]";
int main( int argc, char **argv )
{
if( argc < 3 )
{
fprintf( stderr, "%s\n", usage );
exit(1);
}
// if wordlistfile is an empty string, use above default
if( strlen(argv[1]) > 0 )
{
strcpy( wordlistfile, argv[1] );
}
aword sentence;
strcpy( sentence, argv[2] );
int nextra = argc-3;
assert( nextra < MAXWORDS );
char **extra_words = argv+3;
// dict: the set of all dictionary words, lower cased
int maxwordlen=0;
set dict = readdict( wordlistfile, extra_words, &maxwordlen );
printf( "read dict, maxwordlen=%d\n", maxwordlen );
wordarray words;
int nwords = breakwords( sentence, dict, maxwordlen, words );
// print results:
if( nwords == -1 )
{
printf( "No solution found\n" );
} else
{
printf( "found solution with %d words\n", nwords );
// where's Perl's "join" function when you need it:-)
for( int i=0; i<nwords; i++ )
{
printf( "%s%c", words[i], i==nwords-1?'\n':' ' );
}
}
free( words[0] );
setFree( dict );
return 0;
}
<file_sep>/README.md
May 2017: on LinkedIn's Plain Old C programming group, <NAME> set
himself a very hard challenge (that happens to be used as a sample Google
Interview problem used in a Youtube tutorial) - splitting a sentence with
no spaces up into words. eg "IamEricBidong" -> "I am <NAME>".
Obviously, lower case -> upper case transitions are one possible useful
heuristic, this is what Howard Brodale leapt upon and coded up, but that
could be at most only a part of the solution.
Why? Because, quite simply:
**Most English sentences don't conveniently uppercase the first letter of
every word**
Here in my experiments, I've chosen to ignore case entirely doing comparisons,
i.e. I do all "is this string a word" lookups with lower-case candidate words,
but then I extract the equivalent length string from the original sentence
for better reporting of the answer(s):-)
To explore the problem space, I tried various experiments first in my
favourite "executable pseudo-code" (Perl) for convenience (directory
perl-versions):
First I wrote "findwords1", a relatively simple algorithm that at each
stages picks-the-longest-prefix-that-is-a-word (ignoring case). This
works surprisingly well in many cases - although of course the classic
"loiteringwithintent" comes out as "loitering within tent", as "within"
is a longer word with "with".
One problem: initially I used the Linux /usr/share/dict/words as the word
list file, but I found that it contains every single letter a..z, upper
and lower case, and also contains several ridiculous non-real two-letter
words. This caused findwords1 to split "IamEricBidong" into
"I am Eric B id on g" or some such nonsense. I explored min len restrictions,
but of course 'I' and 'a' must be words, so forget that. Instead I copied
dict/words here as "my-dict-words", and removed all single letter entries
(EXCEPT A and I) and also removed a dozen or so two-letter "words" like "er"
and "li".
Specify a different word list file via:
"./findwords1 -w ../my-dict-words SENTENCE"
Also, I gave findwords1 the ability to take any additional arguments (after
the sentence) as extra words to add to the dictionary. For example:
it happens that tragically "Bidong" is not a word in Linux's dict/words. So:
./findwords1 -w ../my-dict-words IamEricBidong
fails completely, whereas
./findwords1 -w ../my-dict-words IamEricBidong bidong
succeeds (because the last "bidong" is the extra word, case doesn't matter).
Second, I wrote "findwords2", an extended version which adds an element
of backtracking, in case picking the longest prefix earlier leads to no
solution, but picking a slightly shorter prefix earlier would lead to a
solution. For example:
./findwords1 -w ../my-dict-words iamericall
fails to find a solution at all, because it commits to "i america" and then
fails with "ll", whereas
./findwords2 -w ../my-dict-words iamericall
delivers the breakdown "i am eric all".
Note that
./findwords2 -w ../my-dict-words "MostEnglishsentencesdon'tconvenientlyuppercasethefirstletterofeveryword"
works to generate the sentence we saw earlier. Isn't that nice:-)
Third, I wrote a rather different version called "findallpossible" which
produces ALL possible word breakdowns (where every candidate word is in the
dictionary). It's left as a (very hard) exercise for the reader to work out
which word breakdown is the most grammatical, or the most semantically
meaningful, or the one with the greatest probability of being correct,
or whatever.
Fourth, I translated findwords1 into C, giving c-versions/findlongest.c,
using a set-of-strings ADT module that I wrote many years ago - essentially
it's a hash table with no values:-)
Note how 108 lines of Perl (findwords1) becomes 764 lines of C (including
set.c and set.h), or 204 lines of C (excluding the set module, which to
be fair I had written years ago). Actually, many of those lines (in both
Perl and C versions) are comments, without those it's roughly 80 lines of
Perl, 165 lines of C. Either way, it seems roughly 2 lines of C per line
of Perl (still excluding the set module).
You can run findlongest as follows:
./findlongest ../my-dict-words "MostEnglishsentencesdon'tconvenientlyuppercasethefirstletterofeveryword"
Fifth, I translated the "with backtracking" version findwords2 into C.
The result is c-versions/backtrack.c, which behaves exactly the same as
findwords2 does, i.e. where findwords1/findlongest finds a solution,
findwords2/backtrack will find the same solution, but where
findwords1/findlongest fails to find a solution, as in the "Iamericall"
example, findwords2/backtrack is much more likely to find a solution.
This time, 154 lines of Perl translates into 264 lines of C - a much better
ratio. I have implemented backtrack.c in a far more "C like" fashion, for
example extracting words in place rather than copying them around all the
time as the Perl version does, and building an array of word lengths rather
than an array of words.
You can run backtrack as follows:
./backtrack ../my-dict-words "MostEnglishsentencesdon'tconvenientlyuppercasethefirstletterofeveryword"
dcw, May 2017
|
90ae11e15616c35c079fdec39ee47b7c71ee9c0a
|
[
"Markdown",
"C",
"Makefile"
] | 5 |
C
|
ImperialCollegeLondon/dcw-splittingintowords
|
3caf5acd611f2544100d05fce75c551ff1003383
|
cad27f9b6dd98955ca48a573fb5513adb98f3806
|
refs/heads/master
|
<file_sep># MACROS
CXX = g++
CXXFLAGS = -O3 -Wall -std=c++11
LIBS = -pthread
OBJS = sender.o receiver.o test.o
EXES = sender receiver test
# TARGETS
all: $(EXES)
sender: sender.o
$(CXX) -o sender sender.o $(CXXFLAGS) $(LIBS)
sender.o: sender.cc util.h
$(CXX) -o sender.o -c sender.cc $(CXXFLAGS) $(LIBS)
receiver: receiver.o
$(CXX) -o receiver receiver.o $(CXXFLAGS) $(LIBS)
receiver.o: receiver.cc util.h
$(CXX) -o receiver.o -c receiver.cc $(CXXFLAGS) $(LIBS)
test: test.o
$(CXX) -o test test.o $(CXXFLAGS) $(LIBS)
test.o: test.cc util.h
$(CXX) -o test.o -c test.cc $(CXXFLAGS) $(LIBS)
# REMOVAL
clean:
rm -rf $(OBJS) $(EXES)<file_sep>#ifndef UTIL_H
#define UTIL_H
#include <mutex>
#include <thread>
#include <vector>
#include <queue>
#include <iomanip>
#include <sstream>
#include <fstream>
#include <iostream>
#include <unistd.h>
#include <arpa/inet.h>
#include <unordered_map>
#include <sys/socket.h>
#include <sys/types.h>
#include <netdb.h>
#include <stdlib.h>
#include <map>
#include <string.h>
#include <algorithm>
#include <time.h>
#include <stdio.h>
#include <signal.h>
using namespace std;
//const int BUFFF_SIZE = 32;
// Payload bytes per segment and bytes in our TCP-style header.
const int SEG_SIZE = 128;
const int HEADER_SIZE = 20;
// Shared log stream.  NOTE(review): logout is constructed from sbuf
// while sbuf is still uninitialised -- confirm the program points it
// at a real streambuf before the first log write.
streambuf * sbuf;
ostream logout(sbuf);
// Serialises writes to logout from multiple threads.
mutex logout_l;
// Endpoint strings used only in log lines; overwritten at startup.
string thisip = "unknown";
string thatip = "unknown";
string thisport = "rand_port";
string thatport = "rand_port";
string thisackport = "rand_port";
string thatackport = "rand_port";
// Minimal TCP-like header packed into bit-fields: ports, sequence and
// ack numbers, ACK/FIN flags, window size, checksum, padding.
struct tcp_header{
    unsigned int s_port:16, d_port: 16, seq_num: 32,
    ack_num:32, :11, ack:1, :3, fin:1, w_size:16, ck_sum:16,emp:16;
    // Fill every byte with 0x01 (a non-zero pattern -- NOTE(review):
    // presumably so the string-based checksum code never sees an
    // embedded NUL; confirm against send_packet/receive_packet),
    // then clear the two flags.
    tcp_header(){memset(this, 1, sizeof(*this)); ack = 0; fin = 0;}
};
// Current wall-clock time as ctime() text, e.g.
// "Wed Jun 30 21:49:08 1993\n" -- note the trailing newline.
string timenow()
{
    time_t now = time(NULL);
    return string(ctime(&now));
}
// Thin wrapper around a TCP stream socket, used as the side channel
// for exchanging ACK headers (the data itself travels over UDP).
class tcp_socket
{
private:
    int s;   // underlying socket descriptor
public:
    tcp_socket(): s(socket(AF_INET, SOCK_STREAM, 0)){}
    tcp_socket(int sock) : s(sock) {}
    // Expose the raw descriptor.
    int sock() {return s;}
    // Bind to the given local port on all interfaces and start
    // listening (backlog 3); errors are reported on stdout only.
    void set_port(int port)
    {
        sockaddr_in s_in;
        s_in.sin_family = AF_INET;
        s_in.sin_addr.s_addr = INADDR_ANY;
        s_in.sin_port = htons(port);
        if (::bind(s, (sockaddr*)&s_in, sizeof(s_in)) == -1)
            cout<<"bind error"<<endl;
        else
        {
            cout<<"tcp port set to: "<<port<<endl;
            listen(s, 3);
            cout<<"start listenning..."<<endl;
        }
    }
    // Connect to ip_addr:port; errors are reported on stdout only.
    void conn(string ip_addr, int port)
    {
        sockaddr_in s_cp;
        s_cp.sin_family = AF_INET;
        s_cp.sin_addr.s_addr = inet_addr(ip_addr.c_str());
        s_cp.sin_port = htons(port);
        if (connect(s, (sockaddr*)&s_cp, sizeof(s_cp)) != 0) {
            cout<<"tcp conn error"<<endl;
        }
    }
    // Accept one incoming connection and wrap it in a new tcp_socket.
    tcp_socket acpt()
    {
        sockaddr_in sai;
        socklen_t l = sizeof(sai);
        return tcp_socket(accept(s, (sockaddr*)&sai, &l));
    }
    // Send an ACK-only header acknowledging seq_num, logging the event.
    void send_ack(int seq_num)
    {
        tcp_header ack_header;
        ack_header.ack_num = seq_num;
        ack_header.ack = 1;
        logout_l.lock();
        logout
        <<timenow()
        <<"SEND ACK: "
        <<thisip
        <<"/"
        <<thisackport
        <<" "
        <<thatip
        <<"/"
        <<thatackport
        <<" "
        <<to_string(ack_header.ack_num)
        <<" "
        <<"ACK"
        <<endl;
        logout_l.unlock();
        // BUG FIX: send() returns -1 on error (or a byte count on
        // success); the original tested !send(...), which only caught
        // a zero return and ignored real failures.
        if (send(s, &ack_header, HEADER_SIZE, 0) == -1)
            cout<<"send error"<<endl;
    }
    // Block until an ACK header arrives; log it and return its ack_num.
    int receive_ack()
    {
        tcp_header ack_header;
        // BUG FIX: in the original, the 'if' body was commented out,
        // so the logout_l.lock() that followed became the conditional
        // body -- the mutex was locked only when recv() FAILED, and
        // the unconditional unlock() below then released a mutex that
        // was never acquired (undefined behaviour).
        if(recv(s, &ack_header, sizeof(ack_header), 0) == -1)
        {
            cout<<"receive error"<<endl;
        }
        logout_l.lock();
        logout
        <<timenow()
        <<"RECEIVE ACK: "
        <<thatip
        <<"/"
        <<thatackport
        <<" "
        <<thisip
        <<"/"
        <<thisackport
        <<" "
        <<to_string(ack_header.ack_num)
        <<" "
        <<"ACK"
        <<endl;
        logout_l.unlock();
        return ack_header.ack_num;
    }
    // Close the underlying socket.
    void shut()
    {
        close(s);
    }
};
// UDP data channel: sends and receives SEG_SIZE-byte payloads with a
// tcp_header prepended, computing a 16-bit ones-complement-style
// checksum over the serialised packet.
class udp_socket
{
private:
    int s;                 // underlying datagram socket
    sockaddr_in cp_addr;   // counterpart (peer) address for sendto()
public:
    udp_socket(): s(socket(AF_INET, SOCK_DGRAM, 0 )) {}
    udp_socket(int sock) : s(sock) {}
    int sock() {return s;}
    // Bind to a local port on all interfaces; errors go to stdout.
    void set_port(int port)
    {
        sockaddr_in s_in;
        s_in.sin_family = AF_INET;
        s_in.sin_addr.s_addr = INADDR_ANY;
        s_in.sin_port = htons(port);
        if (::bind(s, (sockaddr*)&s_in, sizeof(s_in)) == -1)
            cout<<"bind error"<<endl;
        else
            cout<<"port set to: "<<port<<endl;
    }
    // Resolve ip_str:port and remember it as the peer for sendto().
    void set_cp(string ip_str, int port)
    {
        addrinfo *tempinfo;
        cp_addr.sin_family = AF_INET;
        cp_addr.sin_addr.s_addr = INADDR_ANY;
        if (getaddrinfo(ip_str.c_str(), to_string(port).c_str(), NULL, &tempinfo) == -1)
            cout<<"getaddrinfo error"<<endl;
        memcpy(&cp_addr, tempinfo->ai_addr, tempinfo -> ai_addrlen);
        cout<<"counterpart info set, "<<ip_str<<": "<<port<<endl;
    }
    // Serialise header+payload, fill in the checksum, send one packet
    // to the remembered peer, and log the send.
    void send_packet(tcp_header s_header, string content)
    {
        char temp_cstr[HEADER_SIZE + SEG_SIZE];
        char content_cstr[SEG_SIZE+1];
        //cout<<sizeof(content.c_str())<<"strcpy next"<<endl;
        strcpy(content_cstr, content.c_str());
        memcpy(temp_cstr, &s_header, HEADER_SIZE);
        memcpy((temp_cstr + HEADER_SIZE), content_cstr, SEG_SIZE);
        // NOTE(review): constructing a std::string from temp_cstr stops
        // at the first NUL byte, so the checksum below covers only the
        // bytes before that NUL.  receive_packet computes it the same
        // way, so the two ends agree -- but confirm this is intended.
        string temp = temp_cstr;
        cout<<temp.size()<<endl;
        // Sum big-endian byte pairs, then fold carries into 16 bits and
        // store the ones-complement in the header.
        unsigned long sum = 0;
        int i = 0;
        for (i = 0; i < temp.size(); i+=2)
        {
            int temp_sum = 0;
            string two_bytes = temp.substr(i,min(2,(int)(temp.size()- i)));
            if (two_bytes.length() == 1) {
                temp_sum += (int)two_bytes[0];
            }
            else
            {
                temp_sum += (int)two_bytes[0] * 256;
                temp_sum += (int)two_bytes[1];
            }
            sum = sum + temp_sum;
            //cout<<two_bytes<<":"<<temp_sum<<endl;
        }
        unsigned short remainder = sum % 65536;
        unsigned short checksum = (sum >> 16) + remainder;
        s_header.ck_sum = 65535 - checksum;
        memcpy(temp_cstr, &s_header, HEADER_SIZE);
        if( sendto(s, temp_cstr, HEADER_SIZE + SEG_SIZE, 0,(sockaddr*) &cp_addr, sizeof(cp_addr)) == -1)
            cout<<"send error"<<endl;
        else
        {
            logout_l.lock();
            logout
            <<timenow()
            <<"SEND DATA: "
            <<thisip
            <<"/"
            <<thisport
            <<" "
            <<thatip
            <<"/"
            <<thatport
            <<" "
            <<to_string(s_header.seq_num);
            if (s_header.fin == 1) {
                logout<<" "<<"FIN"<<endl;
            }
            else
                logout<<endl;
            logout_l.unlock();
        }
    }
    // Block for one packet; fill *r_header, verify the checksum, log,
    // and return the payload.  FIN packets skip the checksum check.
    // On checksum failure the packet is logged as CORRUPTED and the
    // call recurses to wait for the next packet.
    string receive_packet(tcp_header *r_header)
    {
        char buff[SEG_SIZE + HEADER_SIZE];
        char content[SEG_SIZE+2];
        unsigned int t;
        t = sizeof(cp_addr);
        //cout<<"receiving..."<<endl;
        recvfrom(s, buff , SEG_SIZE+HEADER_SIZE, 0,(sockaddr*)&cp_addr, &t);
        //cout<<r<<" bytes data received."<<endl;
        //buff[r] = '\0';
        strcpy(content, buff+HEADER_SIZE);
        content[SEG_SIZE] = '\0';
        string result = content;
        //cout<<content<<endl;
        memcpy(r_header, buff, HEADER_SIZE);
        //cout<<"content:"<<result<<endl;
        //cout<<result.length()<<endl;
        // Recompute the checksum over the packet with ck_sum forced to
        // all-ones, mirroring send_packet's string-based summation
        // (same NUL-truncation caveat as there).
        unsigned short checksum = r_header->ck_sum;
        r_header->ck_sum = 65535;
        memcpy(buff, r_header, HEADER_SIZE);
        string temp = buff;
        temp = temp.substr(0,SEG_SIZE+HEADER_SIZE);
        //cout<<temp.size()<<endl;
        int i = 0;
        unsigned long sum = 0;
        for (i = 0; i < temp.size(); i+=2)
        {
            int temp_sum = 0;
            string two_bytes = temp.substr(i,min(2,(int)(temp.size()- i)));
            if (two_bytes.length() == 1) {
                temp_sum += (int)two_bytes[0];
            }
            else
            {
                temp_sum += (int)two_bytes[0] * 256;
                temp_sum += (int)two_bytes[1];
            }
            sum = sum + temp_sum;
            //cout<<two_bytes<<":"<<temp_sum<<endl;
        }
        // NOTE(review): the 257*2 correction presumably compensates for
        // the ck_sum field bytes replaced above -- confirm against
        // send_packet's calculation.
        sum = sum + 257*2;
        //cout<<sum<<" "<<checksum<<" "<<i<<endl;
        sum = sum + checksum;
        if (r_header->fin == 1) {
            logout_l.lock();
            logout
            <<timenow()
            <<"RECEIVE DATA: "
            <<thatip
            <<"/"
            <<thatport
            <<" "
            <<thisip
            <<"/"
            <<thisport
            <<" "
            <<to_string(r_header->seq_num)
            <<" "
            <<"FIN"
            <<endl;
            logout_l.unlock();
            return result;
        }
        if (sum % 65536 + (sum >> 16) != 65535) {
            logout_l.lock();
            logout
            <<timenow()
            <<"RECEIVE DATA:"
            <<thatip
            <<"/"
            <<thatport
            <<" "
            <<thisip
            <<"/"
            <<thisport
            <<" "
            <<to_string(r_header->seq_num)
            <<" "
            <<"CORRUPTED"
            <<endl;
            logout_l.unlock();
            return receive_packet(r_header);
        }
        logout_l.lock();
        logout
        <<timenow()
        <<"RECEIVE DATA: "
        <<thatip
        <<"/"
        <<thatport
        <<" "
        <<thisip
        <<"/"
        <<thisport
        <<" "
        <<to_string(r_header->seq_num)
        <<endl;
        logout_l.unlock();
        return result;
    }
    // Close the underlying socket.
    void shut() {close(s);}
};
#endif<file_sep>import threading
import queue
import random
import time
def mergeSort(inputlist):
    """Sort inputlist in place, sorting each half in its own thread."""
    start_time = time.time()
    if len(inputlist) > 1:
        half = len(inputlist) // 2
        left = inputlist[:half]
        right = inputlist[half:]
        # One worker per half; both run the same recursive sort.
        workers = [threading.Thread(target=mergeSort, args=(left,)),
                   threading.Thread(target=mergeSort, args=(right,))]
        for w in workers:
            w.start()
        for w in workers:
            w.join()
        # Merge the two sorted halves back into inputlist.
        li = ri = out = 0
        while li < len(left) and ri < len(right):
            if left[li] < right[ri]:
                inputlist[out] = left[li]
                li += 1
            else:
                inputlist[out] = right[ri]
                ri += 1
            out += 1
        while li < len(left):
            inputlist[out] = left[li]
            li += 1
            out += 1
        while ri < len(right):
            inputlist[out] = right[ri]
            ri += 1
            out += 1
    print("list length: ", len(inputlist), " time elipsed: ", time.time() - start_time)
def randomArray(array):
    """Append 1000 random ints from [1, 10000000) to array, in place."""
    for _ in range(1, 1001):
        array.append(random.randrange(1, 10000000))
    print("random array generated!")
    #print(array)
    #print(id(array))
def main():
    # Generate 1000 random ints and sort them with the threaded merge
    # sort above, reporting total wall-clock time.
    #array = [2,1,4,3,6,8,7,5]
    array = []
    randomArray(array)
    #print(array)
    #print(id(array))
    start_time = time.time()
    mergeSort(array)
    #print(array)
    print("sorting time: ", time.time() - start_time)

if __name__ == '__main__':
    main()
<file_sep>#ifndef UTIL_H
#define UTIL_H
#include <mutex>
#include <thread>
#include <vector>
#include <iomanip>
#include <sstream>
#include <fstream>
#include <iostream>
#include <unistd.h>
#include <arpa/inet.h>
#include <unordered_map>
#include <sys/socket.h>
#include <sys/types.h>
#include <netdb.h>
#include <stdlib.h>
#include <map>
#include <string.h>
#include <algorithm>
#include <time.h>
#include <stdio.h>
#include <signal.h>
using namespace std;
// string -> string lookup table type (general-purpose alias)
typedef unordered_map <string,string> string_map;
// Protocol/tuning constants.
static int BUFFER_SIZE = 1024;
static int CONSECUTIVE_FAILURES = 3;
static int BLOCK_TIME = 60;//second
static int TIME_OUT = 2;//minute
// Wire commands (sent as the first token of every message) followed
// by user connection/block states.
enum command {IGNORE , REQUEST_CONNECT, REQUEST_USERINFO, USERINFO,
AUTHENTICATED, LOGIN_DENIED, LOGIN_BLOCKED, CLIENT_DISP, CLIENT_LIST
,LOGOUT, WHOELSE, BROAD_MESSAGE, BROAD_USER, WHOLAST, MESSAGE_TO,
BLACK_ADD, BLACK_REMOVE,
ONLINE, OFFLINE, BLOCKED, NORMAL};
// Stream-extraction adapter: read one whitespace-delimited token from
// `in` and append it to the stringstream `ss`; returns `in` so the
// usual chaining/at-eof idioms still work.
istream& operator >> (istream& in, stringstream& ss)
{
    string token;
    in >> token;
    ss << token;
    return in;
}
// Print `content` to stdout, rendering each space as a newline (used
// to show space-separated server lists one item per line).
void list_display(string content)
{
    for (size_t pos = 0; pos < content.size(); ++pos)
    {
        char c = content[pos];
        if (c == ' ')
            cout<<endl;
        else
            cout<<c;
    }
}
// Format "<cmd> " into buffer (buffer must hold BUFFER_SIZE bytes;
// it is zeroed first).
void integrate_message(char* buffer, int cmd)
{
    bzero(buffer,BUFFER_SIZE);
    stringstream ss;
    ss<<cmd<<" ";
    strcpy(buffer,ss.str().c_str());
}
// Format "<cmd> <content>" into buffer (buffer must hold BUFFER_SIZE
// bytes; it is zeroed first).
void integrate_message(char* buffer, int cmd, string content)
{
    bzero(buffer,BUFFER_SIZE);
    stringstream ss;
    ss<<cmd<<" "<<content;
    strcpy(buffer,ss.str().c_str());
}
// Parse the leading integer command token out of a wire message
// ("<cmd> <content>").  Exits the process if the first token is empty.
int get_command(char* buffer)
{
    string msg = buffer;
    size_t sp = msg.find(' ');
    string head = (sp == string::npos) ? msg : msg.substr(0, sp);
    if (head.empty())
    {
        cout<<"error: empty command!"<<endl;
        cout<<buffer<<endl;
        exit(1);
    }
    return stoi(head);
}
// Return everything after the first space of a wire message; when
// there is no space, npos+1 wraps to 0 and the whole string comes
// back (matching the long-standing behaviour callers rely on).
string get_content(char* buffer)
{
    string whole = buffer;
    size_t start = whole.find_first_of(' ') + 1;
    return whole.substr(start, whole.length());
}
// Strip the first space-delimited token from content and return the
// remainder; returns "" when content has no space at all.
string get_content(string content)
{
    size_t sp = content.find(' ');
    if (sp == string::npos)
        return "";
    return content.substr(sp + 1, content.length());
}
// One chat account: credentials, connection/block state, queued
// offline messages and a personal blacklist.
class Client_user
{
public:
    int socket_num;          // fd of the live connection, -1 when offline
    int connection_status;   // ONLINE / OFFLINE
    int block_status;        // NORMAL / BLOCKED (after failed logins)
    time_t blocked_time;     // when the block started
    struct sockaddr block_address;
    time_t last_active_time; // last login/logout/activity stamp
    string username;
    string password;
    vector<string> offline_message; // messages queued while offline
    vector<string> blacklist;       // senders this user refuses
    // Every account starts offline, unblocked and never active.
    Client_user(){
        socket_num = -1;
        connection_status = OFFLINE;
        block_status = NORMAL;
        blocked_time = 0;
        last_active_time = 0;
    }
    // True iff name is on this user's blacklist.
    bool in_blacklist(string name)
    {
        return find(blacklist.begin(), blacklist.end(),name ) != blacklist.end();
    }
    // Add name to the blacklist (duplicates are not filtered out).
    void black_enlist(string name) //modify
    {
        blacklist.push_back(name);
    }
    // Remove every occurrence of name from the blacklist.
    void black_unenlist(string name)
    {
        blacklist.erase(remove(blacklist.begin(), blacklist.end(), name),blacklist.end());
    }
};
// Server-side registry of all accounts plus the set of online users.
// NOTE(review): map::operator[] default-constructs missing entries, so
// looking up an unknown username silently creates a blank user record.
class user_map
{
public:
    map <string,Client_user> users;
    vector<string> online_users;   // usernames currently ONLINE
    // Register an account with its password.
    void initial_user(string username, string pwd)
    {
        users[username].username = username;
        users[username].password = pwd;
    }
    // 1 if the account exists, else 0.
    int if_user_exists(string username)
    {
        return users.count(username);
    }
    // 2 = already logged in elsewhere; 1 = offline and password
    // matches; 0 = unknown user or wrong password.
    int correct_password(string username, string pwd_to_check)
    {
        if (users[username].connection_status == ONLINE) {
            return 2;
        }
        // BUG FIX: compare the stored password against the supplied
        // candidate (the right-hand side of this comparison had been
        // lost, leaving the expression unparsable).
        return (users[username].connection_status == OFFLINE && if_user_exists(username) != 0 &&
            users[username].password == pwd_to_check);
    }
    // Stamp the user's last-active time with "now".
    void update_time(string username) //modify
    {
        time(&(users[username].last_active_time));
    }
    // Mark the user blocked and record when the block started.
    void block_user(string username) //modify
    {
        users[username].block_status = BLOCKED;
        time(&users[username].blocked_time);
    }
    // 1 while the user is still inside the BLOCK_TIME window, else 0.
    int if_blocked(string username)
    {
        time_t now;
        time(&now);
        if (users[username].block_status == BLOCKED && difftime(now,users[username].blocked_time) < BLOCK_TIME) {
            return 1;
        }
        else
            return 0;
    }
    // Transition a user to ONLINE on the given socket.
    void get_online(string username,int Nsocket)//modify
    {
        users[username].connection_status = ONLINE;
        online_users.push_back(username);
        users[username].socket_num = Nsocket;
        update_time(username);
    }
    // Transition a user to OFFLINE and forget its socket.
    void get_offline(string username)// modify
    {
        users[username].connection_status = OFFLINE;
        online_users.erase(remove(online_users.begin(),
            online_users.end(), username), online_users.end());
        users[username].socket_num = -1;
        update_time(username);
    }
    // Space-separated list of online users, excluding the caller.
    string get_online_user(string selfname)
    {
        stringstream ss("");
        for (int i = 0; i < online_users.size() ; i ++)
        {
            if (online_users[i] != selfname)
            {
                ss<<online_users[i];
                ss<<" ";
            }
        }
        return ss.str();
    }
    // Space-separated list of currently-offline users active within
    // the last <duration> minutes.
    string get_offline_user(int duration)
    {
        time_t now;
        time(&now);
        stringstream ss;
        ss.str("");
        for (map<string,Client_user>::iterator i = users.begin(); i != users.end(); i ++)
        {
            if (((i->second).connection_status == OFFLINE) && difftime(now,(i->second).last_active_time) < duration*60)
            {
                ss<<(i->first);
                ss<<" ";
            }
        }
        return ss.str();
    }
    // Route "receiver message..." sent by <sender>.  On success,
    // reply_content is set to "sender:message" and the receiver's
    // socket is returned (-1 means offline: the message was queued).
    // Returns -2 when the sender is on the receiver's blacklist.
    int private_message_handler(string sender, string cur_content, string &reply_content) //modify
    {
        stringstream ss;
        ss.str(cur_content);
        string receiver;
        ss>>receiver;
        if (!users[receiver].in_blacklist(sender))
        {
            reply_content = get_content(cur_content);
            reply_content = sender+":"+reply_content;
            if (users[receiver].socket_num == -1) {
                users[receiver].offline_message.push_back(reply_content);
            }
            return users[receiver].socket_num;
        }
        return -2;
    }
    // Flush any queued offline messages to a freshly connected user
    // as a single CLIENT_DISP payload (most recent message first).
    void offline_message_handler(string username) //modify
    {
        if (! users[username].offline_message.empty())
        {
            string temp = "Offline message >>>";
            char buffer[BUFFER_SIZE];
            while (! users[username].offline_message.empty()) {
                temp = temp + " "+ users[username].offline_message.back();
                users[username].offline_message.pop_back();
            }
            integrate_message(buffer, CLIENT_DISP, temp);
            write(users[username].socket_num, buffer, strlen(buffer));
        }
    }
};
#endif<file_sep># 4119
COMS4119 Computer networks
<file_sep>#include "./util.h"
using namespace std;
// Receiver side of the file transfer: accepts SEG_SIZE chunks over UDP,
// acknowledges each in-order chunk over a TCP side channel, and appends
// the payload to 'filename' until a short chunk (then a FIN header) arrives.
int main(int argc, char *argv[])
{
if (argc != 6) {
cout<<"wrong input!"<<endl;
exit(1);
}
// Command line: <file> <listen_port> <sender_ip> <sender_port> <log|stdout>
string filename = argv[1];
int listen_port = atoi(argv[2]);
string sender_ip = argv[3];
int sender_port = atoi(argv[4]);
string log_file = argv[5];
int udp_sender_port = 10000;
// Globals consumed by the socket helpers in util.h.
thatip = sender_ip;
thatackport = to_string(sender_port);
thisport = to_string(listen_port);
// Log either to the given file or to stdout.
std::ofstream logstream;
if(log_file != "stdout") {
logstream.open(log_file);
sbuf = logstream.rdbuf();
} else {
sbuf = std::cout.rdbuf();
}
//std::ostream out(sbuf);
logout.rdbuf(sbuf);
udp_socket s;
s.set_port(listen_port);
s.set_cp(sender_ip,udp_sender_port);
// The TCP connection carries only the cumulative ACKs back to the sender.
tcp_socket s_tcp;
s_tcp.conn(sender_ip, sender_port);
ofstream ofs;
ofs.open(filename);
tcp_header r_header;
// base_num is the next expected sequence number (cumulative ACK value).
int base_num = r_header.seq_num;
r_header.seq_num --;
//cout<<"base_seq:"<<base_num<<endl;
while (1) {
string r_str = s.receive_packet(&r_header);
if (r_header.seq_num == base_num) {
// In-order segment: ACK it, advance the base and write the payload.
// Out-of-order/duplicate segments are simply dropped (no ACK).
s_tcp.send_ack(base_num + r_str.length());
base_num +=r_str.length();
ofs.write(r_str.c_str(), r_str.length());
cout<<"written content length: "<<r_str.length()<<endl;
//cout<<"base number: "<<base_num<<endl;
// A short segment marks the end of the file.
if (r_str.length() != SEG_SIZE)
break;
}
}
ofs.close();
// Drain late retransmissions until the sender's FIN header arrives.
while (r_header.fin != 1) {
string r_str = s.receive_packet(&r_header);
}
if (log_file != "stdout") {
logstream.close();
}
s.shut();
s_tcp.shut();
return 0;
}<file_sep>#include "./util.h"
using namespace std;
int window_size = 2;          // max segments in flight (overwritten from argv)
mutex ack_count_l;            // guards un_acked_count
int ack_seq;                  // last cumulative ACK received
tcp_socket s_ack;             // accepted TCP connection carrying ACKs
int un_acked_count = 0;       // segments sent but not yet acknowledged
int ack_op = 1;               // set to 0 to stop the ACK-reader thread
int TIME_OUT = 500;           // polls to wait before retransmitting the window
// A sent-but-unacknowledged segment: the payload, the header it was sent
// with (for retransmission), and the cumulative ACK value that clears it.
struct packpair{
string content;
tcp_header header;
int expected_ack_seq;
packpair(string c, tcp_header h, int a) : content(c), header(h), expected_ack_seq(a){}
};
queue<packpair> unacked_sent; // in-flight segments, oldest at the front
mutex queue_l;                // guards unacked_sent
// Runs on a detached thread: consumes cumulative ACKs from the TCP side
// channel and pops acknowledged segments off the in-flight queue.
// Fixes over the original: the queue is locked while front() is inspected
// (the main thread pushes concurrently), and an empty queue is tolerated
// (front()/pop() on an empty std::queue is undefined behavior).
void ack_handler()
{
    while (ack_op) {
        ack_seq = s_ack.receive_ack();   // blocks until the next ACK arrives
        queue_l.lock();
        bool acked = !unacked_sent.empty() &&
                     ack_seq == unacked_sent.front().expected_ack_seq;
        if (acked)
            unacked_sent.pop();
        queue_l.unlock();
        if (acked) {
            // Open one slot in the send window.
            ack_count_l.lock();
            un_acked_count --;
            ack_count_l.unlock();
        }
    }
}
// Sender side of the file transfer: reads 'filename' in SEG_SIZE chunks,
// sends each over UDP and keeps at most window_size unacked segments in
// flight; ACKs arrive on a TCP side channel serviced by ack_handler().
int main(int argc, char *argv[])
{
    if (argc != 7) {
        cout<<"wrong input!"<<endl;
        exit(1);
    }
    // Command line: <file> <remote_ip> <remote_port> <ack_port> <log|stdout> <window>
    string filename = argv[1];
    string remote_ip = argv[2];
    int remote_port = atoi(argv[3]);
    int ack_port = atoi(argv[4]);
    string log_file = argv[5];
    window_size = atoi(argv[6]);
    // Globals consumed by the socket helpers in util.h.
    thatip = remote_ip;
    thatport = to_string(remote_port);
    thisackport = to_string(ack_port);
    // Log either to the given file or to stdout.
    std::ofstream logstream;
    if(log_file != "stdout") {
        logstream.open(log_file);
        sbuf = logstream.rdbuf();
    } else {
        sbuf = std::cout.rdbuf();
    }
    logout.rdbuf(sbuf);
    int udp_send_port = 10000;
    udp_socket s;
    s.set_port(udp_send_port);
    s.set_cp(remote_ip,remote_port);
    tcp_socket s_tcp;
    s_tcp.set_port(ack_port);
    cout<<"wating for receiver..."<<endl;
    s_ack = s_tcp.acpt();      // TCP connection used only for ACKs
    cout<<"receiver accepted"<<endl;
    ifstream ifs;
    ifs.open(filename.c_str());
    char seg_buff[SEG_SIZE];
    tcp_header s_header;
    s_header.s_port = 4118;
    s_header.d_port = 4119;
    thread ack_t(ack_handler); // consumes ACKs and shrinks the window
    ack_t.detach();
    while (un_acked_count < window_size) {
        // Measure the remaining bytes so the last (short) chunk is sized right.
        int s_index = ifs.tellg();
        ifs.seekg(0,ifs.end);
        int e_index = ifs.tellg();
        ifs.seekg(s_index);
        int cur_size = min(SEG_SIZE, e_index - s_index);
        ifs.read(seg_buff, SEG_SIZE);
        // Build the payload from an explicit length. The original
        // 'string temp_s = seg_buff' assumed a NUL-terminated buffer:
        // undefined behavior on a full buffer and truncation of binary
        // data at the first zero byte.
        string temp_s(seg_buff, cur_size);
        ack_count_l.lock();
        s.send_packet(s_header, temp_s);
        queue_l.lock();
        unacked_sent.push(packpair(temp_s, s_header, s_header.seq_num + temp_s.length()));
        queue_l.unlock();
        un_acked_count++;
        ack_count_l.unlock();
        s_header.seq_num += temp_s.length();
        int l = ifs.gcount();
        if (l != SEG_SIZE)
            break;                    // short read: end of file
        // Window full: poll for ACKs; after TIME_OUT polls retransmit the
        // whole outstanding window and restart the timer (the original
        // never reset time_count and copied the queue without the lock).
        int time_count = 0;
        while (un_acked_count == window_size ) {
            if (time_count++ < TIME_OUT) {
                usleep(1);
            }
            else
            {
                queue_l.lock();
                queue<packpair> cp = unacked_sent;
                queue_l.unlock();
                while (!cp.empty()) {
                    s.send_packet(cp.front().header, cp.front().content);
                    cp.pop();
                }
                time_count = 0;
            }
        }
    }
    ifs.close();
    // Tell the receiver the transfer is complete.
    tcp_header fin_header;
    fin_header.fin = 1;
    s.send_packet(fin_header, "");
    if (log_file != "stdout") {
        logstream.close();
    }
    ack_op = 0;                      // stop the (detached) ACK thread loop
    s.shut();
    s_tcp.shut();
    s_ack.shut();
    return 0;
}
<file_sep>list_of_string = ["computer networks",\
"computer", "network", "net", "columbia university",\
"columbia", "united states of america", "united", "united states" ]
remove_list = [x for y in list_of_string for x in list_of_string if (x in y and x != y)]
result_list = [x for x in list_of_string if x not in remove_list]
print(result_list)
# for x in list_of_string:
# print("x: ",x)
# for y in list_of_string:
# print("y: ",y)
# if y in x and y!=x:
# list_of_string.remove(y)
# print("removing ",y)
# print(list_of_string)
<file_sep># MACROS
# Compiler, flags and artifacts for the chat Server/Client build.
CXX = g++
CXXFLAGS = -O3 -Wall -std=c++0x
LIBS = -pthread
OBJS = Server.o Client.o
EXES = Server Client
# TARGETS
# Default target: build both executables.
all: $(EXES)
Server: Server.o
$(CXX) -o Server Server.o $(CXXFLAGS) $(LIBS)
# Both translation units depend on the shared util.h header.
Server.o: Server.cc util.h
$(CXX) -o Server.o -c Server.cc $(CXXFLAGS) $(LIBS)
Client: Client.o
$(CXX) -o Client Client.o $(CXXFLAGS) $(LIBS)
Client.o: Client.cc util.h
$(CXX) -o Client.o -c Client.cc $(CXXFLAGS) $(LIBS)
# REMOVAL
clean:
rm -rf $(OBJS) $(EXES)<file_sep>#include "./util.h"
using namespace std;
// Debug utility: print the in-memory size of tcp_header so the wire
// layout can be sanity-checked.
int main()
{
tcp_header th;
cout<<sizeof(th)<<endl;
return 0;
}<file_sep>#include "./util.h"
mutex cout_l;            // serializes console output between main and output_handler
volatile int logout = 0; // set to 1 to stop the reader thread and exit
// Translate the first word(s) of a console line into a protocol command
// code and strip them from 'content'. Returns -1 for unrecognized input
// (after printing a notice); exits the process on an empty line.
int command_parser(string &content)
{
    stringstream ss(content);
    string firstword;
    ss >> firstword;
    if (firstword.empty())
        exit(1);
    int result = -1;
    if (firstword == "logout")
        result = LOGOUT;
    else if (firstword == "whoelse")
        result = WHOELSE;
    else if (firstword == "wholast")
        result = WHOLAST;
    else if (firstword == "message")
        result = MESSAGE_TO;
    else if (firstword == "broadcast")
    {
        // The second word selects the broadcast flavor.
        string secondword;
        ss >> secondword;
        if (secondword == "message")
            result = BROAD_MESSAGE;
        else if (secondword == "user")
            result = BROAD_USER;
    }
    else if (firstword == "block")
        result = BLACK_ADD;
    else if (firstword == "unblock")
        result = BLACK_REMOVE;
    else
    {
        cout<<"undefined command"<<endl;
        result = -1;
    }
    // Hand back everything after the first word for further parsing.
    content = get_content(content);
    return result;
}
// Runs on its own thread: reads server pushes (user lists and chat
// messages) and prints them, until the user logs out locally or the
// server sends LOGOUT.
void output_handler(int socket_client)
{
char buffer[BUFFER_SIZE];
while (1) {
bzero(buffer,BUFFER_SIZE);
// NOTE(review): the read() return value is not checked; a closed
// socket yields a zero-filled buffer here — confirm get_command
// treats that as a benign/ignored command.
read(socket_client,buffer, BUFFER_SIZE);
if (logout == 1)
return;   // main thread initiated logout while we were blocked in read()
int cur_command = get_command(buffer);
string cur_content = get_content(buffer);
if (cur_command == CLIENT_LIST)
{
// User-list reply: render via list_display under the console lock.
cout_l.lock();
cout<<endl;
list_display(cur_content);
cout_l.unlock();
}
else if (cur_command == CLIENT_DISP)
{
// Plain chat text pushed by the server.
cout_l.lock();
cout<<endl;
cout<< cur_content<<endl;
cout_l.unlock();
}
else if (cur_command == LOGOUT)
return;
}
}
// Prompt for username/password until the server authenticates us.
// 'buffer' holds the server's last reply on entry and is reused for I/O.
// Exits the process when the server reports a login block.
// Fix over the original: credentials are read into strings ('cin >> ss'
// extracted into a stringstream, for which no operator>> overload exists).
void login_handler(char *buffer, int socket_client)
{
    do{
        if(get_command(buffer) == LOGIN_DENIED)
            cout<<"Wrong username/password or already-log-in user"<<endl;
        cout<<"Input your username and password"<<endl;
        string username, password;
        cout<<"Username: ";
        cin >> username;
        cout<<"Password: ";
        cin >> password;
        // Wire format: "<username> <password>" behind the USERINFO command.
        integrate_message(buffer, USERINFO, username + " " + password);
        write(socket_client,buffer,strlen(buffer));
        bzero(buffer,BUFFER_SIZE);
        read(socket_client,buffer, BUFFER_SIZE);
        if (get_command(buffer) == LOGIN_BLOCKED)
        {cout<<"login is blocked for some time, please login later"<<endl; exit(1);}
    } while(get_command(buffer) == LOGIN_DENIED);
    cout<<"Welcome to Simple Chat!"<<endl;
    cout<<"Input your command to start!"<<endl;
    return;
}
// Chat client entry point: connects to <host> <port>, performs the login
// handshake, then runs a reader thread (output_handler) while this thread
// reads console commands and forwards them to the server.
int main(int argc, char *argv[])
{
int socket_client;
struct sockaddr server_addr;
struct addrinfo *serverinfo;
char buffer[BUFFER_SIZE];
if (argc < 3)
{ cout<<"inadequate input"<<endl; exit(1);}
if ((socket_client = socket(AF_INET,SOCK_STREAM,0)) < 0)
{ cout<<"creating socket error"<<endl; exit(1);}
if (getaddrinfo(argv[1], argv[2], NULL, &serverinfo) < 0)
{ cout<<"getaddrinfo error"<<endl; exit(1); }
memcpy(&server_addr, serverinfo->ai_addr, serverinfo->ai_addrlen);
if (connect(socket_client, &server_addr, sizeof(server_addr)) < 0)
{ cout<<"connect error"<<endl; exit(1); }
// Handshake: send REQUEST_CONNECT, expect REQUEST_USERINFO, then log in.
integrate_message(buffer,REQUEST_CONNECT);
write(socket_client,buffer,strlen(buffer));
cout<<"REQUEST_CONNECT sent"<<endl;
bzero(buffer,BUFFER_SIZE);
read(socket_client, buffer, BUFFER_SIZE);
if (get_command(buffer) != REQUEST_USERINFO)
{
cout<<"Unexpected server bahavior."<<endl;
exit(1);
}
login_handler(buffer, socket_client);
// Server pushes are handled on a separate thread so reads never block input.
thread t1(output_handler,socket_client);
int cur_command = -1;
cin.ignore();  // drop the newline left over from the credential prompts
while(logout != 1)
{
string cur_content;
cout_l.lock();
cout<<">>>";
cout_l.unlock();
getline(cin, cur_content);
cur_command = command_parser(cur_content);
if (cur_command >= 0 &&cur_command != LOGOUT)
{
integrate_message(buffer, cur_command, cur_content);
write(socket_client,buffer,strlen(buffer));
}
else
{
// Unrecognized command (-1) or explicit "logout": leave the loop.
logout = 1;
}
}
// Tell the server we are gone, then join the reader thread.
integrate_message(buffer, LOGOUT);
write(socket_client,buffer,strlen(buffer));
cout<<"client logged out"<<endl;
t1.join();
close(socket_client);
freeaddrinfo(serverinfo);
return 0;
}
<file_sep>#include "./util.h"
vector<thread> threads;   // one handler thread per accepted client (never joined)
user_map users;           // shared account registry
mutex user_map_lock;      // guards all mutation of 'users'
// Load "username password" pairs from user_pass.txt into the registry.
// Exits the process when the file cannot be opened.
// Fixes over the original: an unused local Client_user was removed, and
// malformed lines are skipped instead of silently reusing the previous
// line's tokens when extraction fails.
void user_pass_handler(user_map &users)
{
    ifstream user_pass("user_pass.txt");
    if(user_pass.is_open())
    {
        string line, username, pwd;
        while(getline(user_pass,line))
        {
            stringstream ss(line);
            if (ss >> username >> pwd)
                users.initial_user(username, pwd);
        }
        user_pass.close();
    }
    else
    {
        cout<<"unable to open user_pass.txt"<<endl;
        exit(1);
    }
}
// Authenticate a freshly connected client. Reads USERINFO ("user pass")
// messages, allows up to CONSECUTIVE_FAILURES attempts, and either marks
// the user online (AUTHENTICATED) or starts a lockout (LOGIN_BLOCKED).
// Returns the authenticated username, or "" when login failed/was blocked.
string login_handler(char* buffer, user_map *users, int new_socket)
{
string username;
bzero(buffer,BUFFER_SIZE);
read(new_socket, buffer, BUFFER_SIZE);
if (get_command(buffer) == USERINFO)
{
cout<<"USERINFO received"<<endl;
stringstream ss(get_content(buffer));
string pwd;
ss>>username>>pwd;
int login_count= 1;
// A user still inside the lockout window is rejected immediately.
if ((*users).if_blocked(username)) {
integrate_message(buffer, LOGIN_BLOCKED);
write(new_socket, buffer, strlen(buffer));
return "";
}
// Retry loop: deny and re-read credentials until they match (or the
// account turns out to be online: correct_password == 2 is truthy and
// also exits the loop) or the attempt budget is spent.
// NOTE(review): only 1 byte (the command code) is written for several
// replies below — confirm integrate_message's framing makes the rest
// of the buffer irrelevant for content-free commands.
while(!(*users).correct_password(username,pwd) and
login_count < CONSECUTIVE_FAILURES)
{
login_count++;
integrate_message(buffer,LOGIN_DENIED);
cout<<"LOGIN_DENIED"<<endl;
write(new_socket, buffer, 1);
bzero(buffer,BUFFER_SIZE);
read(new_socket, buffer, BUFFER_SIZE);
stringstream ss(get_content(buffer));
ss>>username>>pwd;
}
// correct_password() == 2 means the account is already online elsewhere.
if ((*users).correct_password(username,pwd) == 2) {
integrate_message(buffer, LOGIN_BLOCKED);
write(new_socket, buffer, 1);
return "";
}
if ((*users).correct_password(username,pwd))
{
user_map_lock.lock();
(*users).get_online(username,new_socket);
user_map_lock.unlock();
integrate_message(buffer,AUTHENTICATED);
cout<<"AUTHENTICATED"<<endl;
write(new_socket, buffer, 1);
}
else
{
// Attempt budget exhausted: lock the account for BLOCK_TIME.
integrate_message(buffer,LOGIN_BLOCKED);
cout<<"LOGIN_BLOCKED"<<endl;
user_map_lock.lock();
(*users).block_user(username);
user_map_lock.unlock();
write(new_socket,buffer,1);
username = "";
}
}
return username;
}
// Dispatch one command from an authenticated client.
// 'buffer' holds the raw request on entry and is reused to build replies.
void command_handler(string selfname, int cur_socket, char* buffer, user_map *users)
{
user_map_lock.lock();
(*users).update_time(selfname);   // every command counts as activity
user_map_lock.unlock();
int cur_command = get_command(buffer);
string cur_content = get_content(buffer);
int reply_command = IGNORE;
string reply_content;
int reply_socket = -1;
if (cur_command == WHOELSE)
{
// List everyone online except the requester.
reply_content = (*users).get_online_user(selfname);
reply_command = CLIENT_LIST;
reply_socket = cur_socket;
integrate_message(buffer,reply_command,reply_content);
write(reply_socket,buffer,strlen(buffer));
}
else if (cur_command == WHOLAST)
{
// List everyone active within the last <duration> minutes.
int duration = 0;
stringstream ss;
ss.str(cur_content);
ss>>duration;
reply_socket = cur_socket;
reply_command = CLIENT_LIST;
reply_content = (*users).get_online_user(selfname) + (*users).get_offline_user(duration);
integrate_message(buffer,reply_command,reply_content);
write(reply_socket,buffer,strlen(buffer));
}
else if (cur_command == MESSAGE_TO)
{
// Private message: reply_socket >= 0 delivers now; -1 means it was
// queued offline; -2 means blocked/unknown receiver (send nothing).
reply_command = CLIENT_DISP;
user_map_lock.lock();
reply_socket = (*users).private_message_handler(selfname, cur_content, reply_content);
user_map_lock.unlock();
if (reply_socket >= 0) {
integrate_message(buffer,reply_command,reply_content);
write(reply_socket,buffer,strlen(buffer));
}
}
else if (cur_command == BROAD_MESSAGE)
{
// Broadcast to every online user that has not blacklisted the sender.
reply_command = CLIENT_DISP;
reply_content = get_content(cur_content);
reply_content = selfname+":"+reply_content;
for (unsigned int i = 0; i < (*users).online_users.size(); i++)
{
reply_socket = (*users).users[(*users).online_users[i]].socket_num;
string it_name = (*users).online_users[i];
if (it_name != selfname && !(*users).users[it_name].in_blacklist(selfname))
{
integrate_message(buffer, reply_command, reply_content);
write(reply_socket, buffer, strlen(buffer));
}
}
}
else if (cur_command == BROAD_USER)
{
// "broadcast user <u1> <u2> ... message <text>":
// collect online receivers up to the "message" keyword, then send.
reply_command = CLIENT_DISP;
cur_content = get_content(cur_content);
stringstream ss;
ss.str(cur_content);
string temp;
ss>>temp;
vector<string> receivers;
// Guard against a malformed request without the "message" keyword:
// the original looped forever once extraction failed and 'temp'
// stopped changing.
while (ss && temp != "message")
{
if (find((*users).online_users.begin(), (*users).online_users.end(), temp) != (*users).online_users.end())
{
receivers.push_back(temp);
}
if (!(ss >> temp))
break;
}
getline(ss, reply_content);
// Drop the space left after "message"; guard the empty-text case
// (substr(1) on an empty string throws std::out_of_range).
if (!reply_content.empty())
reply_content = reply_content.substr(1,reply_content.length());
reply_content = selfname + ":" + reply_content;
for (unsigned int i = 0; i < receivers.size(); i ++) {
if (!(*users).users[receivers[i]].in_blacklist(selfname))
{
integrate_message(buffer, CLIENT_DISP,reply_content);
write((*users).users[receivers[i]].socket_num, buffer, strlen(buffer));
}
}
}
else if (cur_command == BLACK_ADD)
{
// Add the named user to the requester's blacklist.
user_map_lock.lock();
(*users).users[selfname].black_enlist(cur_content);
user_map_lock.unlock();
}
else if (cur_command == BLACK_REMOVE)
{
// Remove the named user from the requester's blacklist.
user_map_lock.lock();
(*users).users[selfname].black_unenlist(cur_content);
user_map_lock.unlock();
}
}
// Per-client session thread: performs the connect handshake, logs the
// user in, flushes queued offline messages, then processes commands
// until LOGOUT or an inactivity timeout.
void client_handler(user_map *users, int new_socket)
{
char buffer[BUFFER_SIZE];
bzero(buffer,BUFFER_SIZE);
if (read(new_socket, buffer, BUFFER_SIZE) < 0)
{cout<<"reading new_socket error!"<<endl;exit(1);}
if (get_command(buffer) == REQUEST_CONNECT)
{
cout<<"REQUEST_CONNECT received."<<endl;
integrate_message(buffer,REQUEST_USERINFO);
write(new_socket, buffer, 1);
cout<<"REQUEST_USERINFO sent."<<endl;
string username = login_handler(buffer,users,new_socket);
if (username == "")
{
// Login failed or account blocked: drop the connection.
close(new_socket);
return;
}
int cur_command;
// Brief pause before flushing offline messages — presumably to give
// the client time to start its reader thread; confirm against Client.cc.
usleep(100);
user_map_lock.lock();
(*users).offline_message_handler(username);
user_map_lock.unlock();
do{
bzero(buffer,BUFFER_SIZE);
read(new_socket, buffer, BUFFER_SIZE);
cur_command = get_command(buffer);
time_t now;
time(&now);
// Inactivity timeout: TIME_OUT is compared in minutes (difftime is seconds).
if (difftime(now, (*users).users[username].last_active_time) > TIME_OUT * 60)
break;
command_handler(username, new_socket, buffer, users);
}while(cur_command != LOGOUT);
user_map_lock.lock();
(*users).get_offline(username);
user_map_lock.unlock();
}
close(new_socket);
}
// Server entry point: usage "./Server <port>". Loads accounts from
// user_pass.txt, then accepts clients forever, one handler thread each.
int main(int argc, char *argv[])
{
user_pass_handler(users);
int socket_server;
// Ignore SIGPIPE so a write() to a disconnected client cannot kill
// the whole server process.
signal(SIGPIPE, SIG_IGN);
if(argc < 2)
{ cout<<"inadequate input"<<endl;exit(1);}
if ((socket_server = socket(AF_INET,SOCK_STREAM,0)) < 0)
{cout<<"creating socket error"<<endl;exit(1);}
struct sockaddr_in server_addr, client_addr;
server_addr.sin_family = AF_INET;
server_addr.sin_addr.s_addr = INADDR_ANY;
server_addr.sin_port = htons(atoi(argv[1]));
if (::bind(socket_server,
(struct sockaddr *) &server_addr, sizeof(server_addr)) < 0)
{ cout<<"bind error"<<endl; exit(1);}
cout<<"Server starts listening..."<<endl;
listen(socket_server,5);//Initialization finished, start listening
while(1)
{
socklen_t client_addr_len = sizeof(client_addr);
int new_socket = accept(socket_server,
(struct sockaddr *) &client_addr, &client_addr_len);
if (new_socket >=0)
// NOTE(review): handler threads are stored but never joined, so the
// vector grows for the lifetime of the process.
threads.push_back(thread(client_handler,
&users, new_socket));
}
return 0;
}
|
d116faefd2389b08cb411c7a845fba9e976e3f71
|
[
"Markdown",
"Python",
"Makefile",
"C++"
] | 12 |
Makefile
|
D-vindicator/computer-networks-projects
|
40d10325778f9c4a1df47711dac899e65843adb3
|
02f798578c7de4b1cce359c8f4dc1d8398cd2230
|
refs/heads/main
|
<repo_name>srdicmonika/moni-example-projects<file_sep>/DoorMovement.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Opens a four-panel sliding door in two phases: after a start delay the
// whole door slides back, then the four panels separate outwards.
public class DoorMovement : MonoBehaviour {
// The four panels of the door
public GameObject doorUP;
public GameObject doorDO;
public GameObject doorLE;
public GameObject doorRI;
private float startZPosition;
public float startCoolDown; // Seconds to wait before the door starts the opening process
public float speed; // Speed of the separation movement
public float howLongMoving; // How long (seconds) the panels keep separating
public float differenceMoveBehind; // How far the door moves back before separating
public float speedMoveBehind; // Speed of the move-back phase
public float pauseBetweenBehindSeparate; // Pause (seconds) between the two phases
private bool isActivated = false; // Movement only runs while this is set
private bool movedBehind = false; // True once the move-back phase has completed
// Use this for initialization
void Start () {
// Remember the initial z of the upper panel to measure move-back progress.
startZPosition = doorUP.transform.position.z;
}
// Update is called once per frame
void Update () {
if (isActivated)
{
move();
}
}
// Public trigger: starts the opening sequence (ignored while already running).
public void openDoor()
{
if (!isActivated)
{
StartCoroutine(startingCooldown());
}
}
// Advances the current phase by one frame.
public void move()
{
if (movedBehind)
{
// Phase 2: the four panels separate from each other.
doorUP.transform.position += Vector3.up * Time.deltaTime * speed;
doorDO.transform.position += Vector3.down * Time.deltaTime * speed;
doorLE.transform.position += Vector3.back * Time.deltaTime * speed;
doorRI.transform.position += Vector3.forward * Time.deltaTime * speed;
}
else
{
// Phase 1: all panels move back together.
// NOTE(review): the panels are translated along Vector3.right (world x)
// but progress is measured on position.z below — this only matches if
// the door is oriented so world x maps onto its depth axis; confirm in
// the scene setup.
doorUP.transform.position += Vector3.right * Time.deltaTime * speedMoveBehind;
doorDO.transform.position += Vector3.right * Time.deltaTime * speedMoveBehind;
doorLE.transform.position += Vector3.right * Time.deltaTime * speedMoveBehind;
doorRI.transform.position += Vector3.right * Time.deltaTime * speedMoveBehind;
Debug.Log(doorUP.transform.position.z - startZPosition);
// Once the distance from the start position crosses the threshold,
// switch from moving back to separating.
if ((doorUP.transform.position.z - startZPosition) >= differenceMoveBehind)
{
isActivated = false;
StartCoroutine(switchToSeparationMovement());
StartCoroutine(endOpeningTimer());
}
}
}
// Waits the initial delay, then starts phase 1.
IEnumerator startingCooldown()
{
yield return new WaitForSeconds(startCoolDown);
isActivated = true;
}
// Stops the movement after howLongMoving seconds.
IEnumerator endOpeningTimer()
{
yield return new WaitForSeconds(howLongMoving);
isActivated = false;
}
// Waits between the phases, then starts the separation phase.
IEnumerator switchToSeparationMovement()
{
yield return new WaitForSeconds(pauseBetweenBehindSeparate);
movedBehind = true;
isActivated = true;
}
}
<file_sep>/TestHelper.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Editor helper: toggles the room between a see-through "work mode"
// (transparent material, directional light on) and the final look
// (fixed material, light off).
public class TestHelper : MonoBehaviour {
    public bool switchIntoWorkMode;
    public Material transparentMat;
    public Material fixMat;
    public Light directionalLight;
    public GameObject[] roomComponents;

    // Called by Unity whenever a value changes in the inspector.
    public void OnValidate()
    {
        Material activeMaterial = switchIntoWorkMode ? transparentMat : fixMat;
        foreach (GameObject part in roomComponents)
        {
            part.GetComponent<Renderer>().material = activeMaterial;
        }
        // The scene light is only wanted while working on the room.
        directionalLight.enabled = switchIntoWorkMode;
    }
}
<file_sep>/TerrainGenerator.cpp
// TerrainGenerator.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include "TerrainGenerator.h"
#include <cstdlib>
#include <iostream>
#include <fstream>
#include <windows.h>
#include <string>
#include <SimpleImage.h>
#include <random>
#include <time.h>
#include <cstdlib>
#include <math.h>
#define IDX(x,y,w) ((x)+(y)*(w)) // x = column, y = row, w = width of the array
#define SQR(x) ((x)*(x))
default_random_engine generator; // deliberately unseeded, so every run is reproducible
//global textures blended into the color map
//NOTE: the "../../../../" prefix only resolves from the TerrainGenerator
//working directory -> remove one "../" when testing standalone, or start
//from the ResourceGenerator.
GEDUtils::SimpleImage tex_tief_flach("../../../../external/textures/gras15.jpg"); //c0: low & flat
GEDUtils::SimpleImage tex_tief_steil("../../../../external/textures/mud02.jpg"); //c1: low & steep
GEDUtils::SimpleImage tex_hoch_flach("../../../../external/textures/rock6.jpg");//c2: high & flat
GEDUtils::SimpleImage tex_hoch_steil("../../../../external/textures/rock8.jpg"); //c3: high & steep
//global variable
const int g_verhaeltnis = 2; // damping factor for the diamond-square jitter
// Entry point. Usage:
//   TerrainGenerator -r <res> -o_height <file> -o_color <file> -o_normal <file>
// Generates a diamond-square heightmap, smooths it, derives a normal map
// and a blended color texture, and writes a 4x-downsampled heightmap.
int _tmain(int argc, _TCHAR* argv[])
{
// Validate the command-line parameters.
if ((argc != 9) || (_tcscmp(argv[1], _TEXT("-r")) != 0) || (_tcscmp(argv[3], _TEXT("-o_height")) != 0) || (_tcscmp(argv[5], _TEXT("-o_color")) != 0) || (_tcscmp(argv[7], _TEXT("-o_normal")) != 0))
{
cout << "ERROR - Eingabe-Parameter inkorrekt" << endl;
system("PAUSE");
return 0;
}
int resolution = _tstoi(argv[2]);
srand(time(NULL));
const wchar_t *o_height = argv[4];
const wchar_t *o_color = argv[6];
const wchar_t *o_normal = argv[8];
if(resolution <= 0)
{
cout << "ERROR - Eingabe-Parameter inkorrekt" << endl;
system("PAUSE");
return 0;
}
// NOTE(review): these buffers are never freed before return (fine for a
// one-shot tool); 'alphaMap' is allocated but unused — the alpha weights
// are computed inline in createColorTexture (see commented-out code below).
float* map = new float[SQR(resolution + 1)];
float* map_downsized = new float[SQR((resolution / 4))];
Vector3* normalMap = new Vector3[SQR(resolution)];
alphas* alphaMap = new alphas[SQR(resolution)];
// 'map' is completely rewritten by the diamond-square algorithm.
cout << "Erstelle heightmap" << endl;
diamondSquare(map, resolution);
// Box-blur the heightmap, printing a story snippet per pass as progress.
cout << "Smoothe heightmap" << endl;
cout << "This might take a few minutes..." << endl;
cout << "Enjoy a story meanwhile..." << endl;
string story[] = {
"Es ", "war ", "einmal ", "vor " , "langer " , "Zeit " , " ein Frosch " , "namens Friedrich.\n" ,
"Er lebte " , "in einem " , "kleinen Teich " , "am Rande " , "der Stadt " , "Muenchen.\n", "Eines Tages ", "beschloss er ", "eine Reise ",
"zu machen. ", "Er packte ", "dafuer\n", "seine 7 ", "Sachen und ", "huepfte los. ",
"Er wollte ", "als erstes ", "sich endlich ", "mal die\n", "technische ", "Universitaet ", "Muenchen ",
"ansehen. ", "<NAME> ", "hatte ihm ", "mal erzaehlt,\n", "dass dort ", "eine riesige ", "Rutsche ", "sei. Deshalb ", "machte er ", "sich auf "
, "die Suche.\n", "Leider reiste ", "er zuerst ", "zum Maschinenwesen. ", "Dort gab ", "es nur ", "komische ", "Flieger. ", "Und keiner ", "davon ",
"essbar :(\n", "Aber ein ", "Pinguin ", "gab ihm den ", "Tipp, dass ", "die Rutsche ", "nebenan sei. ", "Also ", "huepfte er ", "rueber. ",
"Dort stellte ", "er mit ", "Schrecken ", "fest, dass ", "man in den ", "dritten Stock ", "muss! ", "Aber er ", "war ja ein ", "MeisterHuepfer!\n",
"Als er ", "jedoch oben ", "ankam, sah ", "er, dass man ", "nur mit Matte ", "rutschen darf!! ", "Aber die ", "lagen alle unten:(\n", "Doch zu ", "seinem Glueck ",
"gab es einen ", "Fahrstuhl!! :D\n", "Diesen benutzte ", "er dann auch! ", "Und so ", "kam es ", "zu einem ", "legendaeren ", "Rutschrekord! ", "Der bis ",
"heute nie ", "gebrochen ", "wurde ;)\n ", "Und so ", "endet die ", "Geschichte ", "des meist ", "gerutschten Frosches ", "aller Zeiten!\n ", "ENDE\n ",
};
int smoothwert = 100;
// 100 passes keep the runtime acceptable; the story array above has
// exactly 100 entries, one printed per pass.
for (int i = 0; i < smoothwert; i++) {
smoothArray(map, resolution, resolution);
if (smoothwert <= 100) {
cout << "" << story[i];
}
}
// Derive per-texel normals from the smoothed heights.
cout << "Erstelle NormalMap" << endl;
generateNormalMap(map, normalMap, resolution);
// Save the normal map (format is whatever SimpleImage writes for o_normal).
cout << "Output NormalMap" << endl;
outputNormalMap(normalMap, resolution, o_normal);
// Blend the four terrain textures into the color map.
cout << "erstelle Texture" << endl;
createColorTexture(resolution, map, normalMap, o_color);
// Average 4x4 blocks into the low-resolution heightmap.
cout << "Downsizing HeightMap" << endl;
downsizing(map, map_downsized, resolution);
cout << map_downsized[IDX(0, 0, resolution / 4)] << endl;
cout << map_downsized[IDX(2, 4, resolution / 4)] << endl;
cout << map_downsized[IDX(12, 12, resolution / 4)] << endl;
cout << map_downsized[IDX(resolution/4-1, 0, resolution / 4)] << endl;
cout << map_downsized[IDX(resolution / 4 - 1, resolution / 4 - 1, resolution / 4)] << endl;
// Save the downsampled heightmap.
cout << "Output downsized HeightMap" << endl;
outputHeightMap(map_downsized, resolution/4, o_height);
cout << "Alles ausgefuehrt!" << endl;
system("PAUSE");
return 0;
}
//Es wird jetzt doch keine alpha map erstellt sondern die alphawerte direkt in der funktion berechnet
/*
void calcAlphas(float height, float slope, float& alpha1, float& alpha2, float& alpha3) {
alpha1 = (1 - height) * slope;
alpha2 = height;
alpha3 = height * slope;
} // Problem: Very smooth blending, can you improve this?
//Speichern der Alphawerte in seperater Tabelle fürs Zugreifen
void calcAlphaMap(alphas* alphaMap, float* heightMap, Vector3* normalMap, int resolution) {
for (int y = 0; y < resolution; y++) {
for (int x = 0; x < resolution; x++) {
calcAlphas(heightMap[IDX(x, y, resolution)], (1 - normalMap[IDX(x, y, resolution)].z),
(alphaMap[IDX(x, y, resolution)].alpha1), (alphaMap[IDX(x, y, resolution)].alpha2), (alphaMap[IDX(x, y, resolution)].alpha3));
}
}
}
*/
//berechnet color anhand von alpha mapping
//und speichert sie in col
void getColor(int x, int y, float a1, float a2, float a3, color& col) {
//newColor = a3 * c3 + (1- a3) * (a2 * c2 +(1-a2) * (a1* c1 + (1- a1) * c0))
float r0, r1, r2, r3, g0, g1, g2, g3, b0, b1, b2, b3;
tex_tief_flach.getPixel((x % tex_tief_flach.getWidth()), (y % tex_tief_flach.getHeight()), r0, g0, b0);
tex_tief_steil.getPixel((x % tex_tief_steil.getWidth()), (y % tex_tief_steil.getHeight()), r1, g1, b1);
tex_hoch_flach.getPixel((x % tex_hoch_flach.getWidth()), (y % tex_hoch_flach.getHeight()), r2, g2, b2);
tex_hoch_steil.getPixel((x % tex_hoch_steil.getWidth()), (y % tex_hoch_steil.getHeight()), r3, g3, b3);
col.r = a3 * r3 + (1 - a3) * (a2 * r2 + (1 - a2) * (a1 * r1 + (1 - a1) * r0));
col.g = a3 * g3 + (1 - a3) * (a2 * g2 + (1 - a2) * (a1 * g1 + (1 - a1) * g0));
col.b = a3 * b3 + (1 - a3) * (a2 * b2 + (1 - a2) * (a1 * b1 + (1 - a1) * b0));
}
// Render the blended color texture: per texel, blend weights are derived
// from height and slope (alpha1 ~ low & steep, alpha2 ~ high,
// alpha3 ~ high & steep) and fed to getColor().
void createColorTexture(int resolution, float* heightMap, Vector3* normalMap, const wchar_t* o_color) {
GEDUtils::SimpleImage newTexture(resolution, resolution);
float r, g, b;
for (int y = 0; y < newTexture.getHeight(); y++) {
for (int x = 0; x < newTexture.getWidth(); x++) {
float alpha1, alpha2, alpha3;
float height, slope;
height = heightMap[(IDX(x, y, resolution))];
// Undo the [0,1] packing of normal.z (see generateNormalMap):
// slope is 0 on flat ground and approaches 1 on steep terrain.
slope = 1 - (normalMap[IDX(x, y, resolution)].z * 2 - 1);
alpha1 = (1 - height) * slope;
alpha2 = height;
alpha3 = height * slope;
// getColor writes the blended texel into col.
color col;
getColor(x, y, alpha1, alpha2, alpha3, col);
newTexture.setPixel(x, y, col.r, col.g, col.b);
}
}
try {
if (!newTexture.save(o_color)) {
throw "Could not save color Texture";
}
}
// NOTE(review): a string-literal throw has type const char*; catching it
// as char* relies on MSVC's permissive matching — confirm elsewhere.
catch (char* exception) {
printf("Error: %s\n", exception);
}
}
// Shrink a w x w heightmap by a factor of 4 per dimension: each output
// texel is the average of the corresponding 4x4 block of input texels.
void downsizing(float* old, float* target, int w)
{
    for (int bx = 0; bx < w; bx += 4)
    {
        for (int by = 0; by < w; by += 4)
        {
            float sum = 0;
            for (int dx = 0; dx < 4; dx++)
            {
                for (int dy = 0; dy < 4; dy++)
                {
                    sum += old[IDX(bx + dx, by + dy, w)];
                }
            }
            target[IDX(bx / 4, by / 4, w / 4)] = (sum / 16.f);
        }
    }
}
// Write the normal map to disk via SimpleImage; each component was
// already packed into [0, 1] by generateNormalMap.
void outputNormalMap(Vector3* map, int resolution, const wchar_t* o_height)
{
try {
Vector3 vector;
GEDUtils::SimpleImage normalMap(resolution, resolution);
for (unsigned int y = 0; y<normalMap.getHeight(); y++) {
for (unsigned int x = 0; x<normalMap.getWidth(); x++) {
vector = map[IDX(x, y, resolution)];
normalMap.setPixel(x, y, vector.x, vector.y, vector.z);
}
}
if (!normalMap.save(o_height)) {
throw "Could not save NormalMap";
}
}
// NOTE(review): string-literal throws are const char*; catch(char*)
// relies on MSVC's permissive matching — confirm on other compilers.
catch (char* exception) {
printf("Error: %s\n", exception);
}
}
// Build a normal map from the heightmap using finite differences:
// central differences in the interior, one-sided differences at the
// borders. Fix over the original: at x == w-1 (and y == w-1) the central
// difference read heightMap[IDX(x+1, y, w)], which is the first texel of
// the NEXT row — now a backward difference is used at those edges.
// Each normal component is packed from [-1, 1] into [0, 1] for storage.
void generateNormalMap(float* heightMap, Vector3* normalMap, int w)
{
    float xDiff, yDiff, vectorLength;
    Vector3 normal;
    for (int y = 0; y < w; y++)
    {
        for (int x = 0; x < w; x++)
        {
            // Multiply by w to convert the height delta from normalized
            // coordinates into texel units.
            if (x == 0)
                xDiff = (heightMap[IDX(x + 1, y, w)] - heightMap[IDX(x, y, w)]) * w;
            else if (x == w - 1)
                xDiff = (heightMap[IDX(x, y, w)] - heightMap[IDX(x - 1, y, w)]) * w;
            else
                xDiff = (((heightMap[IDX(x + 1, y, w)] - heightMap[IDX(x - 1, y, w)]) / 2) * w);
            if (y == 0)
                yDiff = (heightMap[IDX(x, y + 1, w)] - heightMap[IDX(x, y, w)]) * w;
            else if (y == w - 1)
                yDiff = (heightMap[IDX(x, y, w)] - heightMap[IDX(x, y - 1, w)]) * w;
            else
                yDiff = (((heightMap[IDX(x, y + 1, w)] - heightMap[IDX(x, y - 1, w)]) / 2) * w);
            // Surface normal of z = h(x, y) is (-dh/dx, -dh/dy, 1), normalized,
            // then remapped into [0, 1] per component.
            vectorLength = sqrtf(SQR(xDiff) + SQR(yDiff) + 1);
            normal.x = ((-xDiff / vectorLength) + 1) / 2;
            normal.y = ((-yDiff / vectorLength) + 1) / 2;
            normal.z = ((1 / vectorLength) + 1) / 2;
            normalMap[IDX(x, y, w)] = normal;
        }
    }
}
// Write a grayscale heightmap image (values in [0, 1]) via SimpleImage.
void outputHeightMap(float* map, int resolution, const wchar_t* o_height)
{
try {
GEDUtils::SimpleImage heightMap(resolution, resolution);
for (unsigned int y = 0; y<heightMap.getHeight(); y++) {
for (unsigned int x = 0; x<heightMap.getWidth(); x++) {
heightMap.setPixel(x, y, map[IDX(x, y, resolution)]);
}
}
if (!heightMap.save(o_height)) {
throw "Could not save gray image";
}
}
// NOTE(review): string-literal throws are const char*; catch(char*)
// relies on MSVC's permissive matching — confirm on other compilers.
catch (char* exception) {
printf("Error: %s\n", exception);
}
}
// Box-blur the terrain once: every cell becomes the average of its (up
// to 9) in-bounds neighbors. Works on a snapshot copy so values smoothed
// earlier in the pass do not bleed into later cells.
// Fix over the original: the snapshot was released with 'delete' instead
// of 'delete[]', which is undefined behavior for new[] allocations.
void smoothArray(float* field, int width, int height)
{
    float* temp = new float[width*height];
    for (int i = 0; i < (width*height); i++)
        temp[i] = field[i];
    for (int i = 0; i < height; i++)
    {
        for (int j = 0; j < width; j++)
        {
            float sum = 0;
            int counter = 0;
            // Accumulate the 3x3 neighborhood, clipped at the borders.
            for (int a = -1; a < 2; a++)
            {
                for (int b = -1; b < 2; b++)
                {
                    if ((j + b) >= 0 && (i + a) >= 0 && (i + a) < height && (j + b) < width)
                    {
                        sum += temp[IDX(j + b, i + a, width)];
                        counter++;
                    }
                }
            }
            field[IDX(j, i, width)] = (sum / counter * 10) / 10;
        }
    }
    delete[] temp;   // array form matches the new[] above
}
// Allocate the (resolution+1) x (resolution+1) diamond-square grid and
// seed its four corners with random heights so the terrain starts tilted.
// Fix over the original: the size expression used '^', which is XOR in
// C++ (not exponentiation), so SQR(((resolution)+1)^2) allocated the
// wrong number of elements. Callers must release with delete[].
float* newArray(int resolution) {
    float* map = new float[SQR(resolution + 1)];
    map[(IDX(0, 0, ((resolution) + 1)))] = randomNumber(); //1st corner
    map[(IDX(0, (resolution), ((resolution) + 1)))] = randomNumber(); //2nd corner
    map[(IDX((resolution), 0, ((resolution) + 1)))] = randomNumber(); //3rd corner
    map[(IDX((resolution), (resolution), ((resolution) + 1)))] = randomNumber(); //4th corner
    return map;
}
// Draw a random height in [0, 1]: a N(0, 1) sample mapped via (x+1)/2,
// redrawn until it lands inside the range. Uses the global unseeded
// engine, so the sequence is identical on every run.
float randomNumber() {
float random;
normal_distribution<float> distribution(0.0, 1.0);
do
{
random = (distribution(generator) + 1) / 2;
} while ((random < 0.0f) || (random > 1.0f));
return random;
}
// Draw a signed jitter value in [-verhaeltnis, verhaeltnis]: a normal
// sample with stddev 'verhaeltnis', redrawn until it is inside the range.
float randomNumberPosNeg(float verhaeltnis) {
float random;
normal_distribution<float> distribution(0.0, verhaeltnis);
do
{
random = (distribution(generator));
} while ((random < -(verhaeltnis)) || (random > verhaeltnis));
return random;
}
// Used by the diamond step: mean of the four corner values plus a random
// offset (drawn here directly), clamped to [0, 1].
float middleValue4Z(float a, float b, float c, float d, float verhaeltnis) {
	float result = (a + b + c + d) / 4 + randomNumberPosNeg(verhaeltnis);
	if (result > 1.f)
		result = 1.0;
	if (result < 0.0)
		result = 0.0;
	return result;
}
// Used by the square step at the border, where only three neighbours exist:
// mean of the three values plus a random offset, clamped to [0, 1].
float middleValue3Z(float a, float b, float c, float verhaeltnis) {
	float result = (a + b + c) / 3 + randomNumberPosNeg(verhaeltnis);
	if (result > 1.f)
		result = 1.0;
	if (result < 0.0)
		result = 0.0;
	return result;
}
// Diamond step: sets the centre of the square with top-left corner (x, y)
// and side length `abstand` to the averaged corner values plus noise.
void diamondStep(int x, int y, int abstand, float* pointer, int w) {
	// The random offset shrinks with the square size relative to the map.
	float verhaeltnis = (float)abstand / ((float)w * g_verhaeltnis);
	/*
	A--------B
	|        |
	|   M    |
	|        |
	C--------D
	*/
	float eckeA = pointer[(IDX(x, y, w))];
	float eckeB = pointer[(IDX((x + abstand), y, w))];
	float eckeC = pointer[(IDX(x, (y + abstand), w))];
	float eckeD = pointer[(IDX((x + abstand), (y + abstand), w))];
	pointer[(IDX((x + abstand / 2), (y + abstand / 2), w))] =
		middleValue4Z(eckeA, eckeB, eckeC, eckeD, verhaeltnis);
}
// Square step for the top and left edge midpoints of the square at (x, y);
// the spacing `abstand` is halved afterwards by the caller.
void squareStep(int x, int y, int abstand, float* pointer, int w) {
	float verhaeltnis = (float)abstand / ((float)w * g_verhaeltnis);
	// All positions above and to the left (except last column and last row).
	int obenX = x + abstand / 2;
	int obenY = y;
	if (y - abstand / 2 >= 0)
	{
		pointer[(IDX(obenX, obenY, w))] = middleValue4Z(pointer[(IDX(x, y, w))], pointer[(IDX((x + abstand), y, w))],
			pointer[(IDX(obenX, y - abstand / 2, w))], pointer[(IDX(obenX, y + abstand / 2, w))], verhaeltnis);
	}
	else
	{
		// Border: only three diamond neighbours exist.
		pointer[(IDX(obenX, obenY, w))] = middleValue3Z(pointer[(IDX(x, y, w))], pointer[(IDX((x + abstand), y, w))],
			pointer[(IDX(obenX, y + abstand / 2, w))], verhaeltnis);
	}
	int linksX = x;
	int linksY = y + abstand / 2;
	// BUG FIX: the horizontal diamond neighbours of the left-edge midpoint
	// (linksX, linksY) lie on its own row linksY (= y + abstand/2); the old
	// code sampled them on row obenY (= y), i.e. the wrong vertices.
	if (x - abstand / 2 >= 0)
	{
		pointer[(IDX(linksX, linksY, w))] = middleValue4Z(pointer[(IDX(x, y, w))], pointer[(IDX((x), (y + abstand), w))],
			pointer[(IDX((x - abstand / 2), linksY, w))], pointer[(IDX((x + abstand / 2), linksY, w))], verhaeltnis);
	}
	else
	{
		pointer[(IDX(linksX, linksY, w))] = middleValue3Z(pointer[(IDX(x, y, w))], pointer[(IDX((x), (y + abstand), w))],
			pointer[(IDX((x + abstand / 2), linksY, w))], verhaeltnis);
	}
}
// Fills the midpoints of the last row and the last column, which squareStep
// deliberately skips (it only handles top/left edges to avoid writing each
// midpoint twice).
void squareStep2(int i, int abstand, float* pointer, int w)
{
	float verhaeltnis = (float)abstand / ((float)w * g_verhaeltnis);
	// Midpoint on the bottom edge (row w).
	// NOTE(review): these IDX calls pass width w although the grid is
	// allocated with (w+1) vertices per side — verify against IDX's definition.
	int untenX = i + abstand / 2;
	int untenY = w;
	pointer[(IDX(untenX, untenY, w))] = middleValue3Z(pointer[(IDX(i, untenY, w))], pointer[(IDX((i + abstand), untenY, w))],
		pointer[(IDX(untenX, (i + abstand / 2), w))], verhaeltnis);
	// Midpoint on the right edge (column w).
	int rechtsX = w;
	int rechtsY = i + abstand / 2;
	pointer[(IDX(rechtsX, rechtsY, w))] = middleValue3Z(pointer[(IDX(rechtsX, i, w))], pointer[(IDX(rechtsX, (i+abstand), w))],
		pointer[(IDX((i + abstand / 2), rechtsY, w))], verhaeltnis);
}
// Runs the diamond-square algorithm on the pre-allocated height grid
// `pointer` with w+1 vertices per side (w must be a power of two).
void diamondSquare(float* pointer, int w) {
	//pointer = newArray(w);
	// Seed the four corners randomly so the plane does not start flat.
	// NOTE(review): the corner writes index with width (w+1) while the
	// diamond/square steps below index with width w — one of the two is
	// presumably wrong; verify against the definition of IDX before changing.
	pointer[(IDX(0, 0, ((w+1))))] = randomNumber(); // corner 1
	pointer[(IDX(0, (w), ((w+1))))] = randomNumber(); // corner 2
	pointer[(IDX((w), 0, ((w+1))))] = randomNumber(); // corner 3
	pointer[(IDX((w), (w), ((w+1))))] = randomNumber(); // corner 4
	int abstand = w;
	// Halve the spacing each pass until every vertex is filled.
	while (abstand > 1) {
		for (int y = 0; y < (w); (y = y + abstand)) {
			for (int x = 0; x < (w); x = (x + abstand)) {
				diamondStep(x, y, abstand, pointer, w);
				squareStep(x, y, abstand, pointer, w);
				// squareStep only handles the top/left midpoints so no field
				// is written twice (efficiency).
			}
		}
		/* After squareStep all square-step fields are filled except the last
		   row and last column => squareStep2 fills the rest. */
		for (int i = 0; i < w; i = i + abstand)
		{
			squareStep2(i, abstand, pointer, w);
		}
		abstand = abstand / 2;
	}
}
# Based on https://machinelearningmastery.com/sequence-classification-lstm-recurrent-neural-networks-python-keras/
# https://medium.com/@pushkarmandot/build-your-first-deep-learning-neural-network-model-using-keras-in-python-a90b5864116d
# Expecting CSV data in format:
# Country (string), avg, max, min, refugees, timestep (0 to train amount)
import csv
import numpy
from numpy import array
from numpy import asarray
from numpy import argmax
from numpy import arange
from numpy import unique
from numpy import reshape
import random
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from keras.models import Sequential
from keras.models import load_model
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from datetime import datetime
def get_country_data(x, y, country_encoder, country_name):
    """Return the (x, y) sample pairs belonging to one country.

    Each x sample stores the encoded country index in column 0;
    country_encoder maps that index back to the country name.
    Returns two numpy arrays (filtered x, filtered y).
    """
    keep = [i for i in range(len(x))
            if country_encoder[int(x[i][0])] == country_name]
    selected_x = [x[i] for i in keep]
    selected_y = [y[i] for i in keep]
    return asarray(selected_x), asarray(selected_y)
def save_plot(model, x, y, scaler_x, scaler_y, disaster_encoder, path, forecast, values_per_country):
    """Render predicted vs. real refugee numbers (plus temperatures and
    disaster markers) for one country and save the figure as an SVG.

    x rows are laid out as [country, avg, max, min, disaster, ...lookback...]
    (see load_data); y rows are forecast-length windows of refugee counts.
    """
    # Scale inputs with the training scaler, predict, then undo the y scaling.
    x_scaled = scaler_x.transform(x)
    expected = y
    predicted = model.predict(x_scaled)
    predicted = scaler_y.inverse_transform(predicted)
    # Keep every forecast-th window so consecutive windows do not overlap.
    predicted_partial = []
    expected_partial = []
    start = 0
    end = len(x)
    for i in arange(start, end):
        if ((i - start) % forecast == 0):
            predicted_partial.append(predicted[i - start])
            expected_partial.append(expected[i - start])
    # Flatten the list of windows into one continuous series.
    plot_data_predicted = [num for elem in predicted_partial for num in elem]
    plot_data_expected = [num for elem in expected_partial for num in elem]
    end = len(plot_data_expected)
    # Set figure size
    fig_size = plt.rcParams["figure.figsize"]
    fig_size[0] = 20
    fig_size[1] = 9
    plt.rcParams["figure.figsize"] = fig_size
    # Plot: ax1 = refugee numbers, ax2 = temperature strip below the figure.
    fig, ax1 = plt.subplots()
    left, bottom, width, height = [0.125, -0.1, 0.776, 0.2]
    ax2 = fig.add_axes([left, bottom, width, height])
    time = arange(start, end)
    ax1.plot(time, plot_data_predicted, 'g--', label='Predicted')
    time = arange(start, end)
    ax1.plot(time, plot_data_expected, 'tab:orange', label='Real Data')
    # Mark the first value of each forecast window.
    time = arange(start,end,forecast)
    ax1.plot(time,[plot_data_predicted[i - start] for i in arange(start,end,forecast)], 'go', label='Start Point for Forecast')
    ax1.set_xlabel('Time')
    ax1.legend(loc='upper left')
    end = len(x)
    time = arange(start, end)
    # Columns 1-3 of each x row are avg/max/min temperature (see load_data).
    avg_temps = [elem[1] for elem in x]
    max_temps = [elem[2] for elem in x]
    min_temps = [elem[3] for elem in x]
    # NOTE(review): 'Maxmimum' is a typo in a runtime label — fix separately.
    ax2.plot(time, max_temps, 'r-', label='Maxmimum')
    ax2.plot(time, avg_temps, 'g-', label='Average')
    ax2.plot(time, min_temps, 'b-', label='Minimum')
    # Column 4 holds the encoded disaster; '*' encodes "no disaster".
    disasters_x = []
    disasters_y = []
    disaster_y_pos = min(min_temps) - 5
    for i in range(0, len(x)):
        dis_index = int (x[i][4])
        if (disaster_encoder[dis_index] != '*'):
            disasters_x.append(i)
            disasters_y.append(disaster_y_pos)
    ax2.plot(disasters_x, disasters_y, 'ro', label='Disasters')
    ax2.legend(loc='upper left')
    ax1.set_ylabel('Refugees', color='C1')
    ax2.set_ylabel('Temperature', color='b')
    left, bottom, width, height = [0.25, 0.6, 0.2, 0.2]
    # Save figure
    # plt.show()
    plt.savefig(path + '.svg', dpi=500, additional_artists=ax2, bbox_inches="tight", format="svg", transparent=True)
    print ("Graph saved to " + path + '.svg')
    plt.close()
def load_data(file, lookback, forecast,vpc):
    """Read the training CSV and build sliding-window samples.

    Each CSV row is (country, avg, max, min, disaster, refugees, time) with
    `time` counting 0..vpc-1 per country. Returns:
      x_final  — rows [country, avg, max, min, disaster] + lookback windows of
                 (avg, max, min, disaster, refugees),
      y_final  — forecast-length windows of refugee counts,
      country_encoder / disaster_encoder — index -> original label arrays.
    """
    x = []
    y = []
    countries = []
    disasters = []
    with open(file, newline='') as csvfile:
        rows = csv.reader(csvfile, delimiter=',', quotechar='"')
        for row in rows:
            country, avg, max, min, disaster, refugees, time = row
            x.append((int(time), float(avg), float(
                max), float(min), int(refugees)))
            y.append((int(time), int(refugees)))
            countries.append(country)
            disasters.append(disaster)
        csvfile.close()
    countries = array(countries)
    disasters = array(disasters)
    # Hot one encoding of countries (unique() doubles as a label encoder).
    country_encoder, countries_encoded = unique(countries, return_inverse=True)
    disaster_encoder, disasters_encoded = unique(
        disasters, return_inverse=True)
    x = zip(countries_encoded, disasters_encoded, x)
    y = zip(countries_encoded, y)
    # format now (country, time, avg, max, min, refugees)
    # convert to [country] + [avg,max,min] + lookback * [avg,max,min,refugees]
    last_values = []
    x_final = []
    y_final = []
    for (country, disaster, (time, avg, max, min, refugees)) in x:
        # time == 0 marks the start of a new country: reset the history.
        if (time == 0):
            del last_values[:]
        # Drop samples whose forecast window would run past the data's end.
        if (time > vpc - forecast):
            continue
        if (lookback == 0):
            x_final.append([country, avg, max, min, disaster])
        elif (time >= lookback):
            # History is full: emit a sample, then slide the 5-value-wide
            # window one step (pop the oldest avg/max/min/disaster/refugees).
            x_final.append([country, avg, max, min, disaster] + last_values)
            last_values.pop(0)
            last_values.pop(0)
            last_values.pop(0)
            last_values.pop(0)
            last_values.pop(0)
            last_values.append(avg)
            last_values.append(max)
            last_values.append(min)
            last_values.append(disaster)
            last_values.append(refugees)
        else:
            # Still warming up: accumulate history only.
            last_values.append(avg)
            last_values.append(max)
            last_values.append(min)
            last_values.append(disaster)
            last_values.append(refugees)
    del last_values[:]
    for (country, (time, refugees)) in y:
        # New country: flush the previous country's final window, reset.
        if (time == 0):
            if (last_values):
                y_final.append(last_values.copy())
            del last_values[:]
        if (time < lookback):
            continue
        if (time - lookback > forecast - 1):
            # Window full: emit a copy, then slide it by one refugee value.
            y_final.append(last_values.copy())
            last_values.pop(0)
            last_values.append(refugees)
        else:
            last_values.append(refugees)
    # Flush the trailing window of the last country.
    y_final.append(last_values.copy())
    return (x_final, y_final, country_encoder, disaster_encoder)
def split_data(x, y, lookback, forecast, train_amount, vpc):
    """Split samples into train/test sets by position within a country block.

    Each country contributes `vpc - forecast + 1` consecutive samples; the
    first `train_amount` of every block go to training, the remainder to
    test. `lookback` is accepted for interface compatibility but unused.
    Returns (x_train, y_train, x_test, y_test) as numpy arrays.
    """
    block_len = vpc - forecast + 1
    train_x, train_y = [], []
    test_x, test_y = [], []
    for idx in range(len(x)):
        if idx % block_len < train_amount:
            train_x.append(x[idx])
            train_y.append(y[idx])
        else:
            test_x.append(x[idx])
            test_y.append(y[idx])
    return (asarray(train_x), asarray(train_y),
            asarray(test_x), asarray(test_y))
def create_model(input_dimension, lookback, forecast):
    """Build and compile the feed-forward regression network.

    Hidden layer widths taper from 4 + lookback*4 down to 4 + lookback,
    ending in `forecast` output units; optimised with Adam on MSE.
    """
    hidden_units = [4 + lookback * 4, 4 + lookback * 3,
                    4 + lookback * 2, 4 + lookback]
    model = Sequential()
    model.add(Dense(units=hidden_units[0], kernel_initializer='random_uniform',
                    activation='relu', input_dim=input_dimension))
    for units in hidden_units[1:]:
        model.add(Dense(units=units, kernel_initializer='random_uniform',
                        activation='relu'))
    model.add(Dense(units=forecast, kernel_initializer='random_uniform',
                    activation='relu'))
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mse'])
    return model
def get_scalers(x_train, y_train):
    """Fit a StandardScaler for inputs and a MinMaxScaler for targets.

    Returns the fitted (input_scaler, target_scaler) pair.
    """
    input_scaler = StandardScaler()
    target_scaler = MinMaxScaler()
    input_scaler.fit(x_train)
    target_scaler.fit(y_train)
    return (input_scaler, target_scaler)
def train_model(model, x, y, l, f, n, s, indi, trainAmount, vpc):
    """Scale the data and fit `model` for `s` rounds of `n` epochs.

    l/f are lookback/forecast, `indi` is unused (kept for interface
    compatibility) and `trainAmount` doubles as the batch size.
    Returns the trained model.
    """
    # Split the dataset per country block, then fit the scalers on train only.
    x_train, y_train, x_test, y_test = split_data(x, y, l, f, trainAmount, vpc)
    scX, scY = get_scalers(x_train, y_train)
    # Apply the fitted scalers to both splits.
    x_train = scX.transform(x_train)
    x_test = scX.transform(x_test)
    y_train = scY.transform(y_train)
    y_test = scY.transform(y_test)
    for _ in range(s):
        model.fit(x_train, y_train, epochs=n, batch_size=trainAmount, verbose=2)
    return model
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// Handles the intro / main-menu screen: shows the menu sprite, toggles the
// tutorial sprite and loads the game scene when menu buttons are touched.
public class IntroHandler : MonoBehaviour {

	public Image ui_Image;          // UI image the menu/tutorial sprites are shown in
	public Sprite mainMenu;
	public Sprite tutorialScreen;
	public SteamVR_TrackedObject trackedObj;

	SteamVR_Controller.Device controller
	{
		get { return SteamVR_Controller.Input((int)trackedObj.index); }
	}

	void Awake()
	{
		trackedObj = GetComponent<SteamVR_TrackedObject>();
	}

	// Use this for initialization
	void Start () {
		ui_Image.sprite = mainMenu;
	}

	// Update is called once per frame
	void Update () {
		// Trigger-button navigation is currently disabled; menu interaction
		// happens through the trigger colliders in OnTriggerEnter instead.
		//if (controller == null || trackedObj == null)
		//{
		//    Debug.Log("Controller not initilized");
		//    return;
		//}
		//else if (controller.GetPressDown(SteamVR_Controller.ButtonMask.Trigger))
		//{
		//    Next();
		//}
	}

	// Switches to tutorial and then to real game
	void Next()
	{
		if (ui_Image.sprite == mainMenu)
		{
			ui_Image.sprite = tutorialScreen;
		}
		else
		{
			SceneManager.LoadScene("Game");
		}
	}

	public void OnTriggerEnter(Collider col)
	{
		// FIX: CompareTag avoids the per-call string allocation of `col.tag`
		// and is the recommended Unity idiom for tag checks.
		if (col.CompareTag("StartButton"))
		{
			SceneManager.LoadScene("Game");
		}
		if (col.CompareTag("ControlButton"))
		{
			Debug.Log("pressed Button");
			// Toggle between the main menu and the tutorial image.
			if (ui_Image.sprite == mainMenu)
			{
				ui_Image.sprite = tutorialScreen;
			}
			else
			{
				ui_Image.sprite = mainMenu;
			}
		}
	}
}
<file_sep>/FadeToBlack.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Blacks out the view while the player stays inside a trigger volume by
// toggling a black plane's renderer (the coroutines provide a gradual fade
// but are currently not wired up).
public class FadeToBlack : MonoBehaviour {

	public GameObject plane;    // black plane rendered in front of the camera
	Color black = Color.black;
	float alpha = 0;            // current fade alpha used by the coroutines

	public void Start()
	{
		// Start invisible; the hard renderer toggle is used instead of fading.
		//plane.GetComponent<Renderer>().material.color = new Color(0, 0, 0, 0);
		plane.GetComponent<Renderer>().enabled = false;
	}

	public void OnTriggerEnter(Collider col)
	{
		//fadeIntoBlack = true;
		Debug.Log("Should fade to black");
		//StartCoroutine(FadeIntoBlack());
		plane.GetComponent<Renderer>().enabled = true;
	}

	public void OnTriggerExit(Collider col){
		//fadeIntoBlack = false;
		//StartCoroutine(FadeToScene());
		plane.GetComponent<Renderer>().enabled = false;
	}

	// Gradually darkens the plane (currently unused).
	IEnumerator FadeIntoBlack()
	{
		for (alpha = 0; alpha < 1; alpha += 0.1f)
		{
			Debug.Log("Start fading");
			plane.GetComponent<Renderer>().material.color = new Color(0, 0, 0, alpha);
			yield return new WaitForSeconds(0.5f);
		}
	}

	// Gradually reveals the scene again (currently unused).
	IEnumerator FadeToScene()
	{
		for (alpha = 1; alpha > 0; alpha -= 0.1f)
		{
			// BUG FIX: the loop previously hard-coded alpha 0, which made the
			// plane fully transparent on the first step instead of fading out.
			plane.GetComponent<Renderer>().material.color = new Color(0, 0, 0, alpha);
			yield return new WaitForSeconds(0.5f);
		}
	}
}
<file_sep>/weather_data_importer.py
import re
import time
import datetime
import math
import sys
import csv
import itertools
# Stations File Column Indexes
STATION_ID = 0
COUNTRY = 8
# Data File Column Indexes
# NOTE(review): STATION_ID is re-assigned here with the same value (0); both
# file formats share this one set of module globals.
STATION_ID = 0
DATE = 2
TEMPERATURE = 3
# Natural Disaster Data Column Indexes
ND_COUNTRY = 0
ND_START_DATE = 1
ND_END_DATE = 2
ND_TYPE = 3
ND_SUBTYPE = 4
##
# Only keep data from start_year onwards; the export loop runs to end_year.
start_year = 1980
end_year = 2017
# Row limit for quick test runs; negative means "no limit".
test_limit = -1
# For example, in monthly data:
#
# January 2005 = 2005 + (1 - 0.5) / 12 = 2005.042
# June 2008 = 2008 + (6 - 0.5) / 12 = 2008.458
def convert_date(date):
    """Convert a fractional-year string (see examples above) to (year, month)."""
    fractional, whole = math.modf(float(date))
    month = int(round(fractional * 12 + 0.5))
    return (int(whole), month)
def date_range(start, end):
    """Expand 'day/month/year' start/end strings into a list of (year, month).

    An empty month field defaults to January for `start` and December for
    `end`. Both endpoints are included.
    """
    start_parts = re.split('/', start)
    end_parts = re.split('/', end)
    first_year = int(start_parts[2])
    last_year = int(end_parts[2])
    first_month = 1 if start_parts[1] == '' else int(start_parts[1])
    last_month = 12 if end_parts[1] == '' else int(end_parts[1])
    months = []
    if last_year > first_year:
        # Walk month by month across the year boundary.
        year, month = first_year, first_month
        while year < last_year or (year == last_year and month <= last_month):
            if month > 12:
                month = 1
                year += 1
            months.append((year, month))
            month += 1
    else:
        for month in range(first_month, last_month + 1):
            months.append((first_year, month))
    return months
def import_stations_from_file(location):
    """Parse a station site_detail file (tab-separated, '%' lines = preamble).

    Returns (set of country names, dict mapping station id -> country).
    """
    countries = set()
    station_countries = dict()
    with open(location) as stations:
        for line in stations:
            # Skip preamble
            if line[0] == '%':
                continue
            fields = re.split(r'\t+', line)
            country = fields[COUNTRY].strip()
            station_countries[int(fields[STATION_ID])] = country
            countries.add(country)
    return (countries, station_countries)
# Load Natural Disaster Data
def import_natural_disaster_data(location):
    """Read the natural-disaster CSV and index it by country and month.

    Returns dict: country -> {(year, month) -> ["<type> <subtype>", ...]}.
    Honors the global `test_limit` (negative means "no limit").
    """
    global test_limit
    limit = test_limit
    with open(location) as csvfile:
        print ("Reading file: " + location)
        # BUG FIX: a csv.reader was created but never used — the old code
        # split raw lines on ',' by hand, which breaks on quoted fields and
        # left the trailing newline inside the subtype column. Iterate the
        # reader's parsed rows instead.
        reader = csv.reader(csvfile, delimiter=',', quotechar='|')
        all_countries_natural_disasters = dict()
        for row in reader:
            if not row:
                continue  # skip blank lines the reader yields as []
            limit -= 1
            country = row[ND_COUNTRY]
            months = date_range(row[ND_START_DATE], row[ND_END_DATE])
            disaster = str(row[ND_TYPE]) + " " + str(row[ND_SUBTYPE])
            country_disasters = all_countries_natural_disasters.setdefault(country, dict())
            for month in months:
                country_disasters.setdefault(month, []).append(disaster)
            if (limit == 0):
                break
    return all_countries_natural_disasters
# Load Temperature data (AVG, MIN, MAX) and ....
def import_from_file(location, countries, station_countries):
    """Read a Berkeley-Earth-style data file and average it per country/month.

    Returns dict: country -> {(year, month) -> mean temperature across all of
    that country's stations}. Honors the global `test_limit` row limit.
    """
    global test_limit
    limit = test_limit
    with open(location) as data:
        monthly_values_all_stations = dict()
        print ("Reading file: " + location)
        # Get values as tuples (year,month,value) in a dictionary of countries
        for line in data:
            limit -= 1
            if (line[0] != '%'):
                splitted = re.split(r'\t+', line)
                splitted_station = station_countries[int(splitted[STATION_ID])]
                country = splitted_station
                year, month = convert_date(splitted[DATE])
                temp = splitted[TEMPERATURE]
                # Only keep values within the configured time range.
                if (year >= start_year):
                    try:
                        monthly_values_all_stations[country].append((year, month, temp))
                    except KeyError:
                        monthly_values_all_stations[country] = [(year,month,temp)]
            if (limit == 0):
                break
    country_monthly_average_values = dict()
    # Get average temperature from all stations for each month
    print ("Aggregating...")
    i = 0
    for country in countries:
        i += 1
        # Lightweight progress indicator (overwrites the same console line).
        sys.stdout.write('\r' + str(i) + ' out of ' + str(len(countries)) + ' countries')
        sys.stdout.flush()
        try:
            values = monthly_values_all_stations[country]
            monthly_values = dict()
            # All temperature values from all stations for a month
            for value in values:
                year = value[0]
                month = value[1]
                temp = value[2]
                try:
                    monthly_values[(year,month)].append(float(temp))
                except KeyError:
                    monthly_values[(year,month)] = [float(temp)]
            # Collapse each month's station values into a single mean.
            for item in monthly_values.items():
                year_month_tuple = item[0]
                temperature_values = item[1]
                try:
                    country_monthly_average_values[country][year_month_tuple]= sum(temperature_values) / len(temperature_values)
                except KeyError:
                    country_monthly_average_values[country] = dict()
                    country_monthly_average_values[country][year_month_tuple]= sum(temperature_values) / len(temperature_values)
        except KeyError:
            # Country had no stations in this file — skip it silently.
            pass
    print ('\n')
    return country_monthly_average_values
# ---------------------------------------------------------------------------
# Main script: merge avg/max/min temperatures and disaster data per country
# and month, and export them as a ';'-separated file named weather_data.csv.
# ---------------------------------------------------------------------------
ts = time.time()
print ("Start: " + datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'))
all_countries = set()
# Each temperature variant (tavg/tmax/tmin) has its own station list.
countries, station_countries = import_stations_from_file('../tavg/site_detail.txt')
for country in countries:
    all_countries.add(country)
country_monthly_average_values = import_from_file('../tavg/data.txt', countries, station_countries)
countries, station_countries = import_stations_from_file('../tmax/site_detail.txt')
for country in countries:
    all_countries.add(country)
country_monthly_max_values = import_from_file('../tmax/data.txt', countries, station_countries)
countries, station_countries = import_stations_from_file('../tmin/site_detail.txt')
for country in countries:
    all_countries.add(country)
country_monthly_min_values = import_from_file('../tmin/data.txt', countries, station_countries)
country_natural_disasters = import_natural_disaster_data("../natural_disaster.csv")
# NOTE(review): despite the .csv name the output is ';'-separated and written
# manually, and range(start_year, end_year) excludes end_year itself (2017) —
# confirm both are intended.
with open('weather_data.csv', 'w', newline='') as csvfile:
    for country in all_countries:
        try:
            average = country_monthly_average_values[country]
            minimum = country_monthly_min_values[country]
            maximum = country_monthly_max_values[country]
            all_disasters = country_natural_disasters[country]
            for year in range(start_year, end_year):
                for month in range(1,12 + 1):
                    # Missing values are exported as '*'.
                    try:
                        monthly_average = average[(year, month)]
                    except KeyError:
                        monthly_average = "*"
                    try:
                        monthly_max = maximum[(year, month)]
                    except KeyError:
                        monthly_max = "*"
                    try:
                        monthly_min = minimum[(year, month)]
                    except KeyError:
                        monthly_min = "*"
                    try:
                        disasters = all_disasters[(year,month)]
                        disasters = str(list(disasters)).replace('[','').replace(']','')
                    except KeyError:
                        disasters = "*"
                    csvfile.write(country + ";")
                    csvfile.write(str(year) + ";")
                    csvfile.write(str(month) + ";")
                    csvfile.write(str(monthly_average) + ";")
                    csvfile.write(str(monthly_max) + ";")
                    csvfile.write(str(monthly_min) + ";")
                    csvfile.write(disasters + ";")
                    csvfile.write("\n")
        except KeyError:
            # A country missing from any of the four sources is skipped whole.
            print("no data found for " + country)
            continue
ts = time.time()
print ("Finish: " + datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'))
<file_sep>/TimerDisplay.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Counts down the game timer on a TextMesh, flashes the room lights red at a
// configurable point and turns them solid red when time runs out.
public class TimerDisplay : MonoBehaviour {

	public float timeLeft = 1200.00f; // seconds (equals 20 minutes)
	public bool signalAtHalfTime; // If true, timeForSignal will automatically be set to half of the starting time!
	public float timeForSignal; // after this time, the room lights shortly red as signal
	private bool alreadySignaled = false;
	public GameObject timerDisplay; // object carrying the mm:ss TextMesh
	bool gameEnded;
	float timeStep = 1; // countdown speed multiplier

	// Use this for initialization
	void Start () {
		if (signalAtHalfTime)
		{
			timeForSignal = timeLeft / 2;
		}
	}

	// Update is called once per frame
	void Update () {
		if (gameEnded)
			return;

		timeLeft -= Time.deltaTime * timeStep;
		if (timeLeft < 0)
		{
			gameEnded = true;
			GameOver();
		}
		else
		{
			string minutes = Mathf.Floor(timeLeft / 60).ToString("00");
			string seconds = Mathf.Floor(timeLeft % 60).ToString("00");
			timerDisplay.GetComponent<TextMesh>().text = minutes + ":" + seconds;
		}

		if (timeLeft <= timeForSignal && !alreadySignaled)
		{
			alreadySignaled = true;
			GameObject[] lights = GameObject.FindGameObjectsWithTag("Light");
			for (int i = 0; i < lights.Length; i++)
			{
				// BUG FIX: the old code null-checked the Light component but
				// then dereferenced SwitchEmission, which could be missing
				// and throw. Check the component that is actually used.
				SwitchEmission signal = lights[i].GetComponent<SwitchEmission>();
				if (signal != null)
					signal.startSignal(Color.red);
			}
		}
	}

	// Called once when the countdown reaches zero.
	public void GameOver()
	{
		Debug.Log("Game Over!");
		// Update timer:
		timerDisplay.GetComponent<TextMesh>().text = "--:--";
		// All lights to red!
		GameObject[] lights = GameObject.FindGameObjectsWithTag("Light");
		for (int i = 0; i < lights.Length; i++)
		{
			Light light = lights[i].GetComponent<Light>();
			if (light != null)
				light.color = Color.red;
		}
	}

	public void Win()
	{

	}

	// Speeds the countdown up (or slows it down) by the given factor.
	public void changeTimerSpeed(float multiplier)
	{
		timeStep *= multiplier;
	}
}
<file_sep>/SwitchEmission.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Toggles a material's emission and flashes this object's Light as a signal.
public class SwitchEmission : MonoBehaviour {

	public Material materialWithEmission; // material whose emission is toggled
	public Material signalMaterial;
	public float signalFrequence;  // seconds between on/off while signalling
	public int howManySignals;     // number of on/off flashes per signal
	private Material initialMat;   // material this object started with

	// Use this for initialization
	void Start () {
		initialMat = this.gameObject.GetComponent<Renderer>().material;
	}

	public void SwitchEmissionOn()
	{
		// BUG FIX: the Standard shader's emission keyword is "_EMISSION"
		// (all caps); EnableKeyword("_Emission") toggles a non-existent
		// keyword and therefore had no visible effect.
		materialWithEmission.EnableKeyword("_EMISSION");
	}

	public void SwitchEmissionOff()
	{
		materialWithEmission.DisableKeyword("_EMISSION");
	}

	// Starts flashing this object's Light in the given color.
	public void startSignal(Color color)
	{
		StartCoroutine(lightSignal(color));
	}

	// Alternates the Light between `color` and white, howManySignals times.
	IEnumerator lightSignal(Color color)
	{
		for(int i = 0; i < howManySignals; i++) {
			this.gameObject.GetComponent<Light>().color = color;
			yield return new WaitForSeconds(signalFrequence);
			this.gameObject.GetComponent<Light>().color = Color.white;
			yield return new WaitForSeconds(signalFrequence);
		}
	}
}
<file_sep>/use_predictor.py
from refugee_predictor import train_model,save_plot, load_model, load_data, create_model, split_data, get_scalers, get_country_data
from datetime import datetime, timedelta
# Sliding-window configuration: months of history per sample / months predicted.
lookback = 6
forecast = 6
# File locations for the trained model, input CSV and per-country SVG plots.
model_path = './models/no_weather/model'
data_path = './models/no_weather/CLEAN_DATA_2_1_NO_WEATHER.csv'
plot_path = './models/no_weather/SVG/'
# Training schedule: `steps` rounds of `number_of_epochs` epochs each.
number_of_epochs = 100
steps = 1
# 5 base features plus 5 features per lookback month (see load_data).
input_dimension = 5 + lookback * 5
# First `train_amount` samples of each country's `values_per_country` go to training.
train_amount = 180
values_per_country = 216
def create_predictor():
    """Build a fresh model and persist it to `model_path`."""
    fresh_model = create_model(input_dimension, lookback, forecast)
    fresh_model.save(model_path + '.h5')
def train_predictor(model, x, y):
    """Train `model` on the (x, y) samples and persist it to `model_path`."""
    trained = train_model(model, x, y, lookback, forecast, number_of_epochs,
                          steps, input_dimension, train_amount, values_per_country)
    trained.save(model_path + '.h5')
    print("Saved model to: " + model_path + '.h5')
def load_predictor_and_data():
    """Load the saved model plus the (x, y) samples from `data_path`.

    Returns (model, x, y); the encoder arrays from load_data are discarded.
    """
    model = load_model(model_path + '.h5')
    x, y, _, _ = load_data(data_path, lookback, forecast, values_per_country)
    return model, x, y
def predict_and_plot_all_countries():
    """Render a prediction-vs-reality plot for every country in the dataset.

    Saves one SVG per country under `plot_path`.
    """
    # FIX: the dataset was previously loaded twice — once through
    # load_predictor_and_data (whose x/y were discarded) and once more for
    # the encoders. Load the model and the data exactly once each.
    model = load_model(model_path + '.h5')
    x, y, country_encoder, disaster_encoder = load_data(data_path, lookback, forecast, values_per_country)
    # Scalers must be fitted on the training split only.
    x_train, y_train, _, _ = split_data(x, y, lookback, forecast, train_amount, values_per_country)
    scaler_x, scaler_y = get_scalers(x_train, y_train)
    for country in country_encoder:
        x_country, y_country = get_country_data(x, y, country_encoder, country)
        save_plot(model, x_country, y_country, scaler_x, scaler_y, disaster_encoder, plot_path + country, forecast, values_per_country)
#start_time = datetime.now()
#end_time = start_time + timedelta(hours=12)
# Entry point: generate one SVG per country under plot_path.
predict_and_plot_all_countries()
import math
import sys
import csv
import re
import string
#we have dataset: (residence,origin,year,month,value)
#it was easier in txt for me
#contains all data from txt
residence=[]
origin=[]
year=[]
month=[]
value=[]
data = []
inputFile = open("rmtext.txt", "r")
#c=0
#counter= 0
for line in csv.reader(inputFile):
    data.append(line)
    # Workaround for rows the csv reader did not split into 5 fields:
    # re-split the first field on commas by hand.
    if len(line)<5:
        #print(len(line))
        #print("damn son")
        x = line[0].split(',')
        line = x
        #print(line)
        #c+=1
    # Distribute the 5 columns into the parallel lists above.
    for l in range(0,len(line)):
        if l==0:
            residence.append(line[l])
            #works fine
        elif l==1:
            origin.append(line[l])
        elif l==2:
            year.append(line[l])
        elif l==3:
            month.append(line[l])
        elif l==4:
            value.append(line[l])
            #print(line[l])
#here we create a dictionary which gives us the data, we have to type the r,o,y,m and get val
dictionary = {}
DictVal = list(zip(residence,origin,year,month))
#print(len(DictVal))
for i in range(0,len(DictVal)):
    # Empty values mean "no refugees recorded" and become the string "0".
    if value[i] == "":
        value[i]= "0"
    dictionary[DictVal[i]]= value[i]
#Thats how you find value of res, origin,year, month
print(dictionary['Italy','Israel','2016','February'])
#wait ~5 seconds to get result
<file_sep>/LightDeveloper.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Turns the assigned lights off at scene start, so designers can leave them
// enabled while editing the scene without having to switch them off by hand
// before every play-through.
public class LightDeveloper : MonoBehaviour {

	//private Light[] lights;
	public GameObject[] lightObjects; // lights to disable on startup (set in inspector)

	// Use this for initialization
	void Start () {
		//lightObjects = GameObject.FindGameObjectsWithTag("Light");
		for (int i = 0; i < lightObjects.Length; i++)
		{
			// FIX: fetch the component once per object instead of twice.
			Light light = lightObjects[i].GetComponent<Light>();
			if (light != null)
			{
				light.enabled = false;
			}
		}
	}

	// FIX: the empty per-frame Update() was removed — Unity still invokes
	// empty Update methods every frame, which costs time for no effect.
}
<file_sep>/VibrationController.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Drives a "loading bar" that grows along x while the player keeps contact
// and snaps back to its initial size when contact ends.
public class VibrationController : MonoBehaviour {

	public float pauseBetweenVibr;   // NOTE(review): unused in this class — presumably consumed elsewhere; verify
	public float[] vibrationSequenz; // NOTE(review): unused in this class
	public GameObject loadingBar;    // bar whose x-scale visualises progress
	Vector3 startSizeLoadingBar;     // original scale, restored on reset
	public float additor;            // x-scale growth applied every tick
	public float multiplier;         // NOTE(review): unused in this class
	public bool resetted;            // true stops the grow coroutine
	int coroutinesActive = 0;        // guard: allow only one grow coroutine

	// Use this for initialization
	void Start () {
		// Remember the bar's initial scale so ResetLoadingBar can restore it.
		startSizeLoadingBar.x = loadingBar.transform.localScale.x;
		startSizeLoadingBar.y = loadingBar.transform.localScale.y;
		startSizeLoadingBar.z = loadingBar.transform.localScale.z;
	}

	// Update is called once per frame
	void Update () {

	}

	//public void OnTriggerStay(Collider col)
	//{
	//    if (col.tag == "Controller")
	//    {
	//        Debug.Log("should change bar");
	//        loadingBar.transform.localScale += new Vector3((loadingBar.transform.localScale.x + additor), startSizeLoadingBar.y, startSizeLoadingBar.z);
	//        //loadingBar.transform.localScale.Scale(new Vector3(1, 1, multiplier));
	//    }
	//}

	public void OnTriggerExit(Collider col)
	{
		ResetLoadingBar();
	}

	// Grows the bar every 0.2 s until `resetted` becomes true. The counter
	// makes this idempotent: a second concurrent invocation exits at once
	// instead of doubling the growth rate. The increment/decrement order is
	// deliberate — do not reorder.
	IEnumerator ChangeLoadingBarCor()
	{
		coroutinesActive++;
		if (coroutinesActive > 1)
		{
			coroutinesActive--;
			yield break;
		}
		while(!resetted){
			loadingBar.transform.localScale += new Vector3((additor), 0, 0);
			yield return new WaitForSeconds(0.2f);
		}
		coroutinesActive--;
	}

	// Public entry point: (re)start growing the bar.
	public void ChangeLoadingBar()
	{
		resetted = false;
		StartCoroutine(ChangeLoadingBarCor());
	}

	// Restore the original scale and signal the grow coroutine to stop.
	public void ResetLoadingBar()
	{
		loadingBar.transform.localScale = startSizeLoadingBar;
		resetted = true;
		//coroutinesActive = 0;
	}
}
<file_sep>/HighlightObject.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Briefly swaps the object's material for a highlight material when
// Highlight() is called, and periodically restores the original material.
public class HighlightObject : MonoBehaviour {

	public Material highlightMaterial;
	public Material StartMaterial;   // captured from the renderer in Start()
	public bool highlighted;         // true while a highlight flash is running
	float timer = 1800;
	public float resetInterval = 1800; // seconds between periodic material resets

	// Use this for initialization
	void Start () {
		StartMaterial = this.GetComponentInChildren<Renderer>().material;
	}

	// Update is called once per frame
	void Update () {
		timer -= Time.deltaTime;
		if (timer > 0)
			return;
		timer = resetInterval;
		resetMaterial();
	}

	// Flashes the highlight material unless a flash is already in progress.
	public void Highlight()
	{
		if (highlighted)
			return;
		highlighted = true;
		StartCoroutine(HighlightObj());
	}

	// Shows the highlight material for 0.2 s, then restores the previous one.
	IEnumerator HighlightObj()
	{
		Renderer childRenderer = this.GetComponentInChildren<Renderer>();
		if (childRenderer)
		{
			Material previous = childRenderer.material;
			childRenderer.material = highlightMaterial;
			yield return new WaitForSeconds(0.2f);
			childRenderer.material = previous;
		}
		highlighted = false;
	}

	// Forces the renderer back to the material captured at startup.
	public void resetMaterial()
	{
		this.GetComponentInChildren<Renderer>().material = StartMaterial;
	}
}
<file_sep>/TerrainGenerator.h
#pragma once

using namespace std;

// --- Diamond-square terrain generation --------------------------------------
void smoothArray(float*, int, int);       // in-place 3x3 box blur over the field
float* newArray(int);                     // allocate height grid, seed corners
float randomNumberPosNeg(float);          // random value that can be negative
float randomNumber();                     // random value in [0, 1] (positive only)
float middleValue4Z(float, float, float, float, float); // 4-neighbour mean + noise (diamond step)
float middleValue3Z(float, float, float, float);        // 3-neighbour mean + noise (square step, border)
void diamondStep(int, int, int, float*, int);
void squareStep(int, int, int, float*, int);
void squareStep2(int, int, float*, int);  // fills last row/column midpoints
void diamondSquare(float*, int);          // runs the full algorithm

// Blend weights for up to three detail textures.
struct alphas {
	float alpha1 = 0.f;
	float alpha2 = 0.f;
	float alpha3 = 0.f;
};

// RGB color triple in [0, 1].
struct color {
	float r = 0.f;
	float g = 0.f;
	float b = 0.f;
};

struct Vector3 { float x, y, z; };

//void calcAlphas(float, float, float&, float&, float&);
//void calcAlphaMap(alphas*, float*, Vector3*, int);
//bool loadImage(GEDUtils::SimpleImage&, const char*);

// --- Texture / map export ---------------------------------------------------
void getColor(int, int, float, float, float, color&);
void createColorTexture(int, float*, Vector3*, const wchar_t*);
void createGreyImage(float*, Vector3, alphas, int);
void generateNormalMap(float*, Vector3*, int);
void outputHeightMap(float*, int, const wchar_t*);   // save heights as grayscale image
void outputNormalMap(Vector3*, int, const wchar_t*); // save normals as RGB image
void downsizing(float*, float*, int);

// create array
// random number
// compute midpoint
|
c0232a6c46fcca05a2e4048ec2128dd960077c75
|
[
"C#",
"Python",
"C++"
] | 15 |
C#
|
srdicmonika/moni-example-projects
|
9a17ba0d051efb3563b854c47b6875500605d583
|
7eaa0a9d7cbb72390b8f5f4ec5693c1e0bef85a1
|
refs/heads/master
|
<repo_name>TeamSteamIDS/steamulation<file_sep>/test.py
def get_num_games(g_id, u_id):
    """Return how many games of g_id's genre the user u_id owns.

    Looks up g_id's genre in the global `classification` dict and reads the
    per-genre count from the global `user` dict.
    """
    genre = ""
    for candidate in classification:
        if g_id in classification[candidate]:
            genre = candidate
            break
    return user[u_id][genre]
def get_avg_user_level(g_id):
    """Average Steam level of game g_id's players that appear in `user`.

    FIX: returns 0 when none of the game's players are known, instead of
    raising ZeroDivisionError.
    """
    level_total = 0
    cnt = 0
    for u in game_user[g_id]:
        if u in user:
            level_total = level_total + user[u]['userLevel']
            cnt = cnt + 1
    if cnt == 0:
        return 0
    return level_total/cnt
def get_avg_user_playtime(g_id):
    """Average total play time over all players of game g_id.

    FIX: returns 0 when the game has no players, instead of raising
    ZeroDivisionError.
    """
    playtime_total = 0
    cnt = 0
    for u in game_user[g_id]:
        playtime_total = playtime_total + game_user[g_id][u]['total_play_time']
        cnt = cnt + 1
    if cnt == 0:
        return 0
    return playtime_total/cnt
def get_avg_user_playtime_in_G(g_id):
    """Average play time per user across all games in g_id's genre.

    Only games present in `result` are counted. FIX: returns 0 when the
    genre contributes no users, instead of raising ZeroDivisionError.
    NOTE(review): the user count is accumulated once per game (len of its
    user dict); confirm the original did not mean to count per user.
    """
    playtime_total = 0
    num_user = 0
    genre = ""
    for g in classification:
        if g_id in classification[g]:
            genre = g
            break
    for other_game in classification[genre]:
        if other_game not in result:
            continue
        for u in game_user[other_game]:
            playtime_total = playtime_total + game_user[other_game][u]['total_play_time']
        num_user = num_user + len(game_user[other_game])
    if num_user == 0:
        return 0
    return playtime_total/num_user
def get_user_playtime(g_id, u_id):
    """Total hours ``u_id`` played game ``g_id`` (from the game_user pickle)."""
    return game_user[g_id][u_id]['total_play_time']
def get_user_level(u_id):
    """Steam level of ``u_id`` (from the user pickle); KeyError if unknown."""
    return user[u_id]['userLevel']
#remember to load pickle file: review.p
def get_total_review_g(g_id):
    """Count how many entries in the module-level ``review`` dict belong to game ``g_id``."""
    return sum(1 for rev in review.values() if rev['game_id'] == g_id)
def get_total_review_u(u_id):
    """Count how many entries in the module-level ``review`` dict were written by ``u_id``."""
    return sum(1 for rev in review.values() if rev['user_id'] == u_id)
import pickle
import random
import numpy as np

# Load the dictionaries produced by built_dict.py.
with open("dicts/result.p", "rb") as f:
    result = pickle.load(f)
with open("dicts/classification.p", "rb") as f:
    classification = pickle.load(f)
with open("dicts/userDictionary_racing.p", "rb") as f:
    user = pickle.load(f)
with open("dicts/game_user.p", "rb") as f:
    game_user = pickle.load(f)
with open("dicts/review.p", "rb") as f:
    review = pickle.load(f)
cnt = 0
#print(result['4290'])
# Drop games with no per-user playtime data.
# Fix: iterate over a snapshot — popping while iterating the dict itself
# raises "dictionary changed size during iteration" on Python 3.
for key in list(result):
    if key not in game_user:
        result.pop(key)
for i in result:
    cnt = cnt + len(result[i])
#print(result)
# Hold out 20% of (game, user) review pairs for testing.
test_size = cnt * 0.2
test = {}
train = {}
while len(test) < test_size:
    g_id, val = random.choice(list(result.items()))
    u_id, res = random.choice(list(val.items()))
    tup = (g_id, u_id)
    if tup not in test:
        test[tup] = res
#print(test)
for g_id in result:
    for u_id in result[g_id]:
        tup = (g_id, u_id)
        if tup not in test:
            train[tup] = result[g_id][u_id]
len_train = len(train)
N = len_train
X_train = np.zeros((N + 1, 8))
Y_train = np.zeros((N + 1, 1))
cnt = 0
# Build the 8-column feature matrix for the training pairs.
for key in train:
    g_id = key[0]
    u_id = key[1]
    X_train[cnt, 0] = get_num_games(g_id, u_id)
    X_train[cnt, 1] = get_avg_user_level(g_id)
    X_train[cnt, 2] = get_avg_user_playtime(g_id)
    X_train[cnt, 3] = get_avg_user_playtime_in_G(g_id)
    X_train[cnt, 4] = get_user_playtime(g_id, u_id)
    X_train[cnt, 5] = get_user_level(u_id)
    X_train[cnt, 6] = get_total_review_g(g_id)
    X_train[cnt, 7] = get_total_review_u(u_id)
    # Fix: cast the label like the test loop does — recommend values are strings.
    Y_train[cnt] = int(result[g_id][u_id])
    if cnt % 1000 == 0:
        print(cnt, len_train)
    cnt = cnt + 1
    if cnt > N:
        break
# Fix: integer shape — N/5 is a float on Python 3 and np.zeros rejects it.
X_test = np.zeros((N // 5 + 1, 8))
Y_test = np.zeros((N // 5 + 1, 1))
cnt = 0
for key in test:
    g_id = key[0]
    u_id = key[1]
    # Skip pairs whose reviewer has no profile data.
    if u_id not in user:
        continue
    X_test[cnt, 0] = get_num_games(g_id, u_id)
    X_test[cnt, 1] = get_avg_user_level(g_id)
    X_test[cnt, 2] = get_avg_user_playtime(g_id)
    X_test[cnt, 3] = get_avg_user_playtime_in_G(g_id)
    X_test[cnt, 4] = get_user_playtime(g_id, u_id)
    X_test[cnt, 5] = get_user_level(u_id)
    X_test[cnt, 6] = get_total_review_g(g_id)
    X_test[cnt, 7] = get_total_review_u(u_id)
    Y_test[cnt] = int(result[g_id][u_id])
    if cnt % 1000 == 0:
        print(cnt, len_train)
    cnt = cnt + 1
    if cnt > N // 5:
        break
import matplotlib
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.svm import SVC

# Train linear and RBF SVMs and report held-out accuracy.
model_S = SVC(kernel="linear", probability = True)
model_R = SVC(kernel="rbf", probability = True)
model_S.fit(X_train, np.ravel(Y_train))
model_R.fit(X_train, np.ravel(Y_train))
print(model_S.score(X_test, np.ravel(Y_test)))
print(model_R.score(X_test, np.ravel(Y_test)))
<file_sep>/parser/built_dict.py
import pickle
import csv
genre = ['Action', 'Adventure', 'Racing', 'RPG', 'Simulation', 'Sports', 'Strategy']
i = 0
for name in genre:
path = 'parse/appId/appId_' + name
with open(path, 'rb') as csvfile:
spamreader = csv.reader(csvfile, delimiter=',')
for row in spamreader:
print(row)
i = i + 1
if i == 1:
break<file_sep>/parser/parser/parse_General.py
import csv
import time
import re
import os
import sys
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
def parse(id, newFileName):
    """Scrape all community reviews for Steam app ``id`` into CSV ``newFileName``.

    Drives a headless PhantomJS browser to the app's review page, scrolls until
    no more content loads (clicking "See More Content" when the page stalls),
    then extracts reviewer URL, recommendation flag, playtime, helpfulness and
    review text with BeautifulSoup. Returns 0 if Steam redirects away from the
    review URL (e.g. invalid app id); otherwise returns None.
    """
    lang_es = webdriver.ChromeOptions()
    lang_es.add_argument("--lang=en")
    #driver = webdriver.Chrome("chromedriver.exe",chrome_options=lang_es)
    driver = webdriver.PhantomJS("/home/hyc404/steamulation/parser/parser/phantomjs-2.1.1-linux-x86_64/bin/phantomjs")
    #scrolling
    reviewUrl = "http://steamcommunity.com/app/" + id + "/reviews/?browsefilter=toprated&snr=1_5_reviews_"
    driver.get(reviewUrl)
    time.sleep(1)
    curParse = 0
    count = 0
    originSoup = BeautifulSoup(driver.page_source,'html.parser')
    curParse = len(originSoup.findAll("div", { "class" : "date_posted" }))
    click = False
    end = False
    startTime = time.clock()
    while True:
        # Bail out if Steam redirected us away from the review page.
        if driver.current_url != reviewUrl:
            driver.close()
            return 0
            break
        oldSource = driver.page_source
        driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        #after scroll
        newSource = driver.page_source
        # Reset the stall timer whenever scrolling produced new content.
        if len(newSource) != len(oldSource):
            startTime = time.clock()
        looptime = time.clock() - startTime
        # Stalled ~15s: try the "See More Content" button once.
        if looptime > 15 and click == False:
            print("finding click")
            try:
                driver.find_element_by_link_text('See More Content').click()
                print("click!!")
                startTime = time.clock()
                continue
            except:
                click = True
                continue
        # Stalled ~20s after clicking: look for the end-of-list footer text.
        if looptime > 20 and click == True:
            print("finding end")
            try:
                driver.find_element_by_link_text('share a screenshot, make a video, or start a new discussion!')
                break
            except:
                click = False
                startTime = time.clock()
                end = True
                continue
        if end == True:
            break
    page_source = newSource
    driver.close()
    #parsing
    print("parse "+id)
    soup = BeautifulSoup(page_source,'html.parser')
    user = soup.findAll("div", {"class" : "apphub_CardContentAuthorBlock tall"})
    date = soup.findAll("div", { "class" : "date_posted" })
    hour = soup.findAll("div", { "class" : "hours" })
    helpful = soup.findAll("div", { "class" : "found_helpful" })
    positive = soup.findAll("div", { "class" : "title" })
    content = soup.findAll("div", { "class" : "apphub_CardTextContent" })
    userData = []
    userName = []
    dateData = []
    contentData =[]
    positiveData =[]
    hourData = []
    helpfulData = []
    notHelpfulData = []
    funnyData = []
    for each in user:
        userData.append(each.find('a')['href'])
    for each in date:
        dateData.append(each.get_text())
    for each in content:
        contentData.append(each.get_text())
    for each in positive:
        # Steam shows "Recommended"/"Not Recommended"; encode as 1/0.
        if(each.get_text()=="Recommended"):
            positiveData.append(1)
        else:
            positiveData.append(0)
    for each in hour:
        hourData.append(each.get_text())
    for each in helpful:
        helpfulData.append(each.get_text())
    print("write: "+id)
    with open(newFileName, 'w', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(["user name","positive or negative","total playing time","number of helpful","content"])
        leng = len(userData)
        for i in range(leng):
            try:
                writer.writerow([userData[i],positiveData[i],hourData[i],helpfulData[i],contentData[i]])
            except:
                # The parallel lists can differ in length when a card is malformed.
                print("out of range")
    f.close()
    print("end: "+id)
# Driver: scrape reviews for every appId of the genre given on the command line.
# Usage: python parse_General.py <genre>
tag = sys.argv[1]
#start = sys.argv[2]
count = 0
appIdFile = "../appId/" + "appId_" + tag + ".csv"
for id in open(appIdFile):
    # Drop any newline characters from the raw appId line.
    id = id.replace("\n", "")
    fileName = tag + "/" + id + ".csv"
    newFileName = tag + "/" + "parseNew_" + id + ".csv"
    # (A commented-out fallback that re-parsed when fileName was missing
    #  has been dropped; it was dead code.)
    if os.path.isfile(newFileName):
        # Output already present — skip this app.
        print(newFileName + ' exist')
        count = count + 1
        continue
    else:
        # Touch the output file first, then scrape into it.
        with open(newFileName, 'w') as fout:
            print('{0} start'.format(newFileName))
            print(count)
        parse(id, newFileName)
<file_sep>/parser/parser/parseUser/parseUserGeneral.py
import csv
import time
import os
import re
import sys
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
import pandas as pd
import steamapi
def parseUser(fileName, newFileName, csvtag, csvid):
    """Enrich a parsed review CSV with per-reviewer Steam profile data.

    Reads ``fileName`` (columns: user url, recommend flag, playtime, helpful,
    content), queries the Steam Web API for each reviewer's level and counts
    how many of their owned games fall into each of the seven genre appId
    lists, then writes the combined rows to ``newFileName``.

    NOTE(review): ``csvtag``/``csvid`` are accepted but never used — confirm
    callers. The local list named ``time`` shadows the imported time module
    inside this function.
    """
    df = pd.read_csv(fileName)
    username = list()
    recommend = list()
    time = list()
    helpful = list()
    content = list()
    # Copy the five input columns into parallel lists, skipping empty cells.
    for each in df[df.columns[0]]:
        if each != '':
            username.append(each)
    for each in df[df.columns[1]]:
        if each != '':
            recommend.append(each)
    for each in df[df.columns[2]]:
        if each != '':
            time.append(each)
    for each in df[df.columns[3]]:
        if each != '':
            helpful.append(each)
    for each in df[df.columns[4]]:
        if each != '':
            content.append(each)
    print('UserNum: {0}'.format(len(username)))
    genereList = []
    userLevel = []
    for each in username:
        print(each)
        if each == '':
            continue
        user = each
        generes = {'action':0 ,'adventure':0 ,'racing':0 ,'rpg':0 ,'simulation':0 ,'sports':0 ,'strategy':0}
        appidList = []
        #Regular Expression
        # Profiles come as either vanity-URL (/id/<name>/) or numeric (/profiles/<id>/).
        idReg = re.compile('http://steamcommunity.com/id/(.*)/')
        profileReg = re.compile('http://steamcommunity.com/profiles/(.*)/')
        idMatch = re.match(idReg, user)
        profileMatch = re.match(profileReg, user)
        if idMatch:
            name = idMatch.group(1)
            try:
                me = steamapi.user.SteamUser(userurl=name)
                userLevel.append(me.level)
                for each in me.games:
                    appidList.append(each.appid)
            except:
                # Private profile / API failure: record all-zero genre counts.
                genereList.append(generes)
                print('exception')
                continue
        elif profileMatch:
            name = profileMatch.group(1)
            try:
                me = steamapi.user.SteamUser(name)
                userLevel.append(me.level)
                for each in me.games:
                    appidList.append(each.appid)
            except:
                genereList.append(generes)
                print('exception')
                continue
        else:
            print('not found user')
            genereList.append(generes)
            continue
        # Classify each owned appId by scanning the per-genre appId CSVs.
        for id in appidList:
            genereTag = ['action','adventure','racing','rpg','simulation','sports','strategy']
            found = 0
            for tag in genereTag:
                if found == 0:
                    fileNameApp = "../../appId/appId_"+ tag + ".csv"
                    with open(fileNameApp, 'rt') as f:
                        reader = csv.reader(f, delimiter=',')
                        for row in reader:
                            if str(id) == row[0]:
                                generes[tag] = generes[tag] + 1
                                found = 1
                                break
        genereList.append(generes)
        print(generes)
    print(len(genereList))
    outFile = newFileName
    with open(outFile, 'w') as fout:
        writer = csv.writer(fout)
        writer.writerow(["user name","user level","positive or negative","total playing time","number of helpful","content","Action","Adventure","Racing","RPG","Simulation","Sports","Strategy"])
        for i in range(len(username)):
            try:
                genere = genereList[i]
                writer.writerow([username[i],userLevel[i],recommend[i],time[i],helpful[i],content[i],genere['action'],genere['adventure'],genere['racing'],genere['rpg'],genere['simulation'],genere['sports'],genere['strategy']])
            except:
                # userLevel can be shorter than username when lookups failed.
                print('out of index')
    print('{0} succeed'.format(outFile))
    fout.close()
# Driver: enrich every parsed review file of one genre with Steam profile data.
# Usage: python3 parseUserGeneral.py <genre>
steamapi.core.APIConnection(api_key="<KEY>")
tag = sys.argv[1]
#start = sys.argv[2]
count = 0
appIdFile = "../../appId/"+"appId_"+tag+".csv"
for id in open(appIdFile):
    # Strip the trailing newline from the appId line.
    for char in id:
        if char in "\n":
            id = id.replace(char,'')
    fileName = "../"+tag+"/parseNew_"+id+".csv"
    newFileName = "../"+tag+"/finish/"+id+".csv"
    if os.path.isfile(fileName):
        if os.path.isfile(newFileName):
            # Output already present — skip this app.
            print(newFileName+' exist')
            count = count+1
            continue
        else:
            # Touch the output file first so repeated runs skip this id.
            with open(newFileName, 'w') as fout:
                print('{0} start'.format(newFileName))
                fout.close()
            print(id+" start!")
            userList = []
            df = pd.read_csv(fileName)
            for each in df[df.columns[0]]:
                userList.append(each)
            print(count)
            parseUser(fileName,newFileName, tag, id)
    else:
        print(id+" not exist")
<file_sep>/built_dict.py
import pickle
import csv
def classification():
    """Build {genre: [appId, ...]} from the per-genre appId CSVs and pickle it.

    Reads parser/appId/appId_<genre>.csv (one integer appId per line) for each
    of the seven genres and writes the mapping to dicts/classification.p.

    Changes: removed the unused counter ``i``, renamed the local dict so it no
    longer shadows the function name, and dropped a dead commented-out draft.
    """
    mapping = {}
    genre = ['action', 'adventure', 'racing', 'rpg', 'simulation', 'sports', 'strategy']
    for name in genre:
        path = 'parser/appId/appId_' + name + '.csv'
        with open(path, "r") as f:
            # Each line holds a single integer appId.
            mapping[name] = [int(line) for line in f]
    with open("dicts/classification.p", "wb") as f:
        pickle.dump(mapping, f)
def review(genre):
    """Parse the per-game review CSVs for the given genres and pickle three dicts.

    Produces:
      - dicts/review.p    : {review_id: {content, game_id, user_id, recommend, helpful}}
      - dicts/result.p    : {game_id: {user_id: recommend}}
      - dicts/game_user.p : {game_id: {user_id: {'total_play_time': hours}}}

    Fix: file handling now uses ``with``. The original's ``finally: f.close()``
    raised NameError when ``open()`` itself failed on the first file (``f``
    unbound) and closed a stale handle on later failures.
    """
    with open("dicts/classification.p", "rb") as f:
        classification = pickle.load(f)
    cnt = 0
    review = {}
    result = {}
    game_user = {}#{'action':{}, 'adventure':{}, 'racing':{}, 'rpg':{}, 'simulation':{}, 'sports':{}, 'strategy':{}}
    for g in genre:
        path = 'parser/parser/' + g + '/finish/'
        for g_id in classification[g]:
            path_new = path + str(g_id) + '.csv'
            print(g_id)
            cnt = 0
            try:
                with open(path_new, 'rt') as f:
                    reader = csv.reader(f)
                    for l in reader:
                        cnt = cnt + 1
                        if cnt == 1:
                            continue  # skip the header row
                        rev_id = str(g_id) + str(cnt)
                        v = {}
                        v['content'] = l[-8]
                        v['game_id'] = g_id
                        # user_id is the second-to-last path segment of the profile URL.
                        user_list = l[0].split("/")
                        v['user_id'] = user_list[len(user_list) - 2]
                        v['recommend'] = l[2]
                        if g_id in result:
                            result[g_id][v['user_id']] = v['recommend']
                        else:
                            result[g_id] = {v['user_id']: v['recommend']}
                        # Parse "X of Y people found this helpful" into a ratio.
                        help_list = l[4].split(" people ")
                        num_help_list = help_list[0].split(" of ")
                        num_feel_helpful = num_help_list[0].split("\t")[-1]
                        if len(num_help_list) == 1:
                            if "No" in num_help_list[0]:
                                v['helpful'] = 1.0
                            else:
                                v['helpful'] = 0.0
                        else:
                            v['helpful'] = int(num_feel_helpful.replace(',',''))/int(num_help_list[1].replace(',',''))
                        if g_id not in game_user:
                            game_user[g_id] = {}
                        if v['user_id'] not in game_user[g_id]:
                            game_user[g_id][v['user_id']] = \
                                {'total_play_time':float(l[3].split(" hrs")[0].replace(',',''))}
                        review[rev_id] = v
            except Exception:
                # Best-effort: skip files that are missing or malformed.
                continue
    with open("dicts/game_user.p", "wb") as f:
        pickle.dump(game_user, f)
    with open("dicts/result.p", "wb") as f:
        pickle.dump(result, f)
    with open("dicts/review.p", "wb") as f:
        pickle.dump(review, f)
# Build the genre -> appId mapping, then parse reviews for the racing genre only.
classification()
l = ["racing"]
review(l)
<file_sep>/feature.py
def get_num_games(g_id, u_id):
    """Number of games user ``u_id`` owns in the genre that contains ``g_id``.

    Finds the genre bucket of the module-level ``classification`` dict that
    holds ``g_id`` and reads that genre's count from the ``user`` dict.
    """
    genre = next((g for g in classification if g_id in classification[g]), "")
    return user[u_id][genre]
def get_avg_user_level(g_id):
    """Average Steam level of the reviewers of ``g_id`` that appear in ``user``.

    Fixes vs. original: skips reviewers missing from the module-level ``user``
    dict (the unguarded version raised KeyError for them, unlike its twin in
    test.py) and returns 0.0 instead of dividing by zero when none are known.
    """
    level_total = 0
    cnt = 0
    for u in game_user[g_id]:
        if u in user:
            level_total = level_total + user[u]['userLevel']
            cnt = cnt + 1
    return level_total / cnt if cnt else 0.0
def get_avg_user_playtime(g_id):
    """Mean total play time (hours) across all recorded reviewers of ``g_id``.

    Fix: returns 0.0 for a game with no reviewers in ``game_user`` instead of
    raising ZeroDivisionError.
    """
    playtime_total = 0
    for u in game_user[g_id]:
        playtime_total = playtime_total + game_user[g_id][u]['total_play_time']
    reviewers = len(game_user[g_id])
    return playtime_total / reviewers if reviewers else 0.0
def get_avg_user_playtime_in_G(g_id):
    """Mean play time over reviewers of every game in ``g_id``'s genre.

    Fix: the original accumulated ``num_user`` but then divided by
    ``len(game_user[g_id])`` (reviewers of this one game only). Divide by the
    accumulated genre-wide reviewer count instead, as the twin in test.py does,
    and return 0.0 rather than dividing by zero.
    NOTE(review): num_user grows by len(game_user[g]) once per reviewer, not
    once per game — looks like an overcount; confirm intent.
    """
    playtime_total = 0
    num_user = 0
    target = ""
    for g in classification:
        if g_id in classification[g]:
            target = g
            break
    for all_other_g in classification[target]:
        for u in game_user[all_other_g]:
            playtime_total = playtime_total + game_user[all_other_g][u]['total_play_time']
            num_user = num_user + len(game_user[all_other_g])
    return playtime_total / num_user if num_user else 0.0
def get_user_playtime(g_id, u_id):
    """Total hours ``u_id`` played game ``g_id`` (from the game_user pickle)."""
    return game_user[g_id][u_id]['total_play_time']
def get_user_level(u_id):
    """Steam level of ``u_id`` (from the user pickle); KeyError if unknown."""
    return user[u_id]['userLevel']
#remember to load pickle file: review.p
def get_total_review_g(g_id):
    """Count how many entries in the module-level ``review`` dict belong to game ``g_id``."""
    return sum(1 for rev in review.values() if rev['game_id'] == g_id)
def get_total_review_u(u_id):
    """Count how many entries in the module-level ``review`` dict were written by ``u_id``."""
    return sum(1 for rev in review.values() if rev['user_id'] == u_id)
<file_sep>/parser/parser/parseUser/userDictionary.py
#find . -type f -name "parseNew_*" -print0 | xargs -0 -I {} mv {} new (move to folder)
#find . -type f -name "parseNew_*" | wc -l (count numbers)
import pickle
import pandas as pd
import csv
import time
import os
import re
import sys
#Build Genere of user diction
#output will be /genere/finish/genere.p
#python3 userDictionary.py "genere"
# Merges every per-game CSV in ../<genre>/finish into one
# {steam_user_id: {userLevel, per-genre game counts}} dict, pickled to
# ../../../dicts/userDictionary_<genre>.p.
tag = sys.argv[1]
fileList = os.listdir("../"+tag+"/finish")
# NOTE(review): userField is never read below.
userField = ['userLevel', 'action', 'adventure', 'racing', 'rpg', 'simulation', 'sports', 'strategy']
userKey = {}
for fileName in fileList:
    print('{0} start'.format(fileName))
    try:
        if fileName != '.DS_Store':
            fileName = '../'+tag+'/finish/' + fileName
            df = pd.read_csv(fileName)
            userName = list()
            userLevel = list()
            action = list()
            adventure = list()
            racing = list()
            rpg = list()
            simulation = list()
            sports = list()
            strategy = list()
            # Column layout comes from parseUserGeneral.py's header row:
            # 0 = user url, 1 = user level, 6..12 = per-genre game counts.
            for each in df[df.columns[0]]:
                if each != '':
                    userName.append(each)
            for each in df[df.columns[1]]:
                if each != '':
                    userLevel.append(each)
            for each in df[df.columns[6]]:
                if each != '':
                    action.append(each)
            for each in df[df.columns[7]]:
                if each != '':
                    adventure.append(each)
            for each in df[df.columns[8]]:
                if each != '':
                    racing.append(each)
            for each in df[df.columns[9]]:
                if each != '':
                    rpg.append(each)
            for each in df[df.columns[10]]:
                if each != '':
                    simulation.append(each)
            for each in df[df.columns[11]]:
                if each != '':
                    sports.append(each)
            for each in df[df.columns[12]]:
                if each != '':
                    strategy.append(each)
            for i in range(len(userName)):
                userValue = {'userLevel':0, 'action':0, 'adventure':0, 'racing':0, 'rpg':0, 'simulation':0, 'sports':0, 'strategy':0 }
                userValue['userLevel'] = userLevel[i]
                userValue['action'] = action[i]
                userValue['adventure'] = adventure[i]
                userValue['racing'] = racing[i]
                userValue['rpg'] = rpg[i]
                userValue['simulation'] = simulation[i]
                userValue['sports'] = sports[i]
                userValue['strategy'] = strategy[i]
                # Key by the second-to-last path segment of the profile URL
                # (same extraction as built_dict.py's review()).
                user_list = userName[i].split("/")
                userKey[user_list[len(user_list) - 2]] = userValue
    except:
        # Skip unreadable/malformed files; later files still merge in.
        print('{0} error'.format(fileName))
        continue
with open('../../../dicts/userDictionary_'+tag+'.p', "wb") as f:
    pickle.dump(userKey, f)
|
40f6029b5a2c2d4004de1b6f896f1c7f4942bc91
|
[
"Python"
] | 7 |
Python
|
TeamSteamIDS/steamulation
|
eb3ed4e15a65da5b38793749fc26541877729353
|
9b59f8e3665fb09258054c154104c2177928ed20
|
refs/heads/master
|
<file_sep>from django.conf.urls import patterns, include, url
from homepage.views import ItemList, ItemDetailView, ajax
from haystack.views import SearchView
from haystack.forms import SearchForm
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', ItemList.as_view()),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^admin/', include(admin.site.urls)),
url(r'^item/(?P<pk>\w+)/', ItemDetailView.as_view()),
url(r'^search/$', ajax),
)
"""
urlpatterns += patterns('',
(r'^search/', SearchView(form_class = SearchForm)),
)"""<file_sep>#!/bin/bash
set -e
# Log path; LOGDIR is derived from it (not used further in this script).
LOGFILE=/home/test1.log
LOGDIR=$(dirname $LOGFILE)
NUM_WORKERS=3
# Bind address for the WSGI server.
ADDRESS=127.0.0.1:8000
DJANGO_WSGI_MODULE=homepage.wsgi
# user/group to run as
# SECURITY(review): running workers as root is risky; prefer an unprivileged user.
USER=root
GROUP=root
# Replace this shell with gunicorn serving homepage.wsgi:application.
exec gunicorn $DJANGO_WSGI_MODULE:application -c gunicorn.conf.py -w $NUM_WORKERS --bind=$ADDRESS --user=$USER --group=$GROUP --log-level=debug
<file_sep># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: create homepage_style, homepage_item and the Item<->Style M2M join table."""
        # Adding model 'Style'
        db.create_table(u'homepage_style', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=30)),
            ('comment', self.gf('django.db.models.fields.TextField')()),
            ('contr', self.gf('django.db.models.fields.CharField')(max_length=30)),
            ('lolfield', self.gf('django.db.models.fields.CharField')(max_length=30)),
        ))
        db.send_create_signal(u'homepage', ['Style'])
        # Adding model 'Item'
        db.create_table(u'homepage_item', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=30)),
            ('comment', self.gf('django.db.models.fields.TextField')()),
            ('price', self.gf('django.db.models.fields.CharField')(max_length=30)),
        ))
        db.send_create_signal(u'homepage', ['Item'])
        # Adding M2M table for field type1 on 'Item'
        m2m_table_name = db.shorten_name(u'homepage_item_type1')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('item', models.ForeignKey(orm[u'homepage.item'], null=False)),
            ('style', models.ForeignKey(orm[u'homepage.style'], null=False))
        ))
        db.create_unique(m2m_table_name, ['item_id', 'style_id'])
    def backwards(self, orm):
        """Revert: drop the two model tables and the M2M join table created in forwards()."""
        # Deleting model 'Style'
        db.delete_table(u'homepage_style')
        # Deleting model 'Item'
        db.delete_table(u'homepage_item')
        # Removing M2M table for field type1 on 'Item'
        db.delete_table(db.shorten_name(u'homepage_item_type1'))
models = {
u'homepage.item': {
'Meta': {'ordering': "['name']", 'object_name': 'Item'},
'comment': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'price': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'type1': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['homepage.Style']", 'symmetrical': 'False'})
},
u'homepage.style': {
'Meta': {'ordering': "['-name']", 'object_name': 'Style'},
'comment': ('django.db.models.fields.TextField', [], {}),
'contr': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lolfield': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
}
}
complete_apps = ['homepage']<file_sep># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add Style.contr, backfilling existing rows with the default URL."""
        # Adding field 'Style.contr'
        db.add_column(u'homepage_style', 'contr',
                      self.gf('django.db.models.fields.CharField')(default='www.boston-dynimics.com', max_length=30),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop the Style.contr column added in forwards()."""
        # Deleting field 'Style.contr'
        db.delete_column(u'homepage_style', 'contr')
models = {
u'homepage.item': {
'Meta': {'ordering': "['-name']", 'object_name': 'Item'},
'comment': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'price': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'type': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['homepage.Style']", 'symmetrical': 'False'})
},
u'homepage.style': {
'Meta': {'ordering': "['-name']", 'object_name': 'Style'},
'comment': ('django.db.models.fields.TextField', [], {}),
'contr': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
}
}
complete_apps = ['homepage']<file_sep>from haystack import indexes
from homepage.models import Item
class ItemIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for Item: main document built from a template,
    plus name/comment/price pulled straight from model attributes."""
    text = indexes.CharField(use_template=True, document=True)
    name = indexes.CharField(model_attr='name')
    comment = indexes.CharField(model_attr='comment')
    price = indexes.CharField(model_attr='price')
    def get_model(self):
        # The model class this index covers.
        return Item
    def index_queryset(self, using=None):
        # Index every Item; no incremental filtering.
        return self.get_model().objects.all()
<file_sep># models.py
from django.db import models
from django.contrib import admin
class Style(models.Model):
    """A style/category; Items link to it via the Item.type1 many-to-many."""
    name = models.CharField(max_length=30)
    comment = models.TextField()
    # NOTE(review): the migration default suggests contr holds a URL — confirm.
    contr = models.CharField(max_length=30)
    # NOTE(review): purpose of lolfield is not evident from this file.
    lolfield = models.CharField(max_length=30)
    class Meta:
        # Reverse-alphabetical default ordering.
        ordering = ["-name"]
    def __unicode__(self):
        return self.name
class Item(models.Model):
    """A catalogue item, tagged with one or more Styles via type1."""
    name = models.CharField(max_length=30)
    comment = models.TextField()
    type1 = models.ManyToManyField(Style)
    # Price stored as free text, not a numeric field.
    price = models.CharField(max_length=30)
    class Meta:
        ordering = ["name"]
    def __unicode__(self):
        return self.name
admin.site.register(Item)
admin.site.register(Style)
<file_sep>from django.views.generic import DetailView
from django.views.generic import ListView
from homepage.models import Item
from homepage.models import Style
from django.views.generic import View
from django.views.generic.base import TemplateResponseMixin
from haystack.query import SearchQuerySet
from django.http import *
from django.shortcuts import render_to_response
from django.template import RequestContext
import json
from django.core import serializers
from django.utils import simplejson
class ItemList(ListView):
    """Paginated list of all Items (5 per page) with the Style menu in context."""
    model = Item
    context_object_name = 'items'
    template_name = 'item_list.html'
    paginate_by = 5
    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super(ItemList, self).get_context_data(**kwargs)
        # Add all styles so the template can render the navigation menu.
        context['menu'] = Style.objects.all()
        return context
class ItemDetailView(DetailView):
    """Detail page for one Item, with the Style menu added to the context."""
    model = Item
    context_object_name = 'item'
    template_name = 'item_detail.html'
    def get_context_data(self, **kwargs):
        context = super(ItemDetailView, self).get_context_data(**kwargs)
        # All styles, for the navigation menu (same as ItemList).
        context['menu'] = Style.objects.all()
        return context
def ajax(request):
    """Search endpoint.

    POST with 'client_response' runs a Haystack full-text query and returns
    the matching Item objects serialized as JSON; otherwise renders the
    search page template.
    """
    # Fix: use `in` — QueryDict.has_key() is Python-2-only and removed later;
    # the membership test behaves identically on both Python versions.
    if 'client_response' in request.POST:
        z = request.POST['client_response']
        y = SearchQuerySet().filter(content=z)
        # Serialize the underlying model instances, not the search hits.
        data = serializers.serialize("json", [q.object for q in y])
        return HttpResponse(data, content_type="application/json")
    else:
        return render_to_response('search.html', context_instance=RequestContext(request))
<file_sep>bind = "127.0.0.1:8000"
user = "root"
group = "root"
logfile = "/home/gunicorn.log"
workers = 3
loglevel = "info"
proc_name = "test2"
<file_sep>from django.db.models.constants import LOOKUP_SEP
from django.db.models import sql
from django.db import connection
def load_related_m2m(object_list, field):
select_fields = ['pk']
related_field = object_list.models.get_field(field)
related_model = related_field.rel.to
cache_name = 'all_%s' % field
for f in related_model._meta.local_fields:
select_fields.append('%s%s%s' % (field, LOOKUP_SEP, f.column))
query = sql.Query(object_list.model, connection)
query.add_fields(select_fields)
query.add_filter(('pk__in', [obj.pk for obj in object_list]))
related_dict = {}
for row in query.results_iter():
if row[2]:
related_dict.setdefault(row[0], []).append(related_model(*row[1:]))
for obj in object_list:
try:
setattr(obj, cache_name, related_dict[obj.pk])
except KeyError:
setattr(obj, cache_name, [])
return object_list
|
8521e59671959a1c99897c6519aab7505e993e38
|
[
"Python",
"Shell"
] | 9 |
Python
|
gvladimirwork/2test
|
41777db41dedae7f2673e73948a13c565b9f7d95
|
d71aa03716ab384a631701fef0325c68d9e121b2
|
refs/heads/master
|
<file_sep># Collection of embedded code for TI TM4 Microcontroller
A collection of code written while working with the TI TM4 microcontroller. Each program is described below.
## LCD.c
Interface the LCD with the MCU using GPIO, timer, PWM, interrupts.
Features: display characters on LCD, adjust brightness using, scrolling speed
## Theremin.c
A simple digital theremin using Buzzer and photoresistors.
The theremin uses the heterodyne principle to generate an audio signal. The instrument's pitch circuitry includes two radio frequency oscillators. One oscillator operates at a fixed frequency. The frequency of the other oscillator is controlled by the performer's distance from the pitch control antenna.
http://www.youtube.com/watch?v=w5qf9O6c20o
## IR.c
Design and implement a simple infrared remote control compatible with the Deepcool RGB colour LED light strip in the lab.
IR_FSM.png contains the FSM design
<file_sep>#include "CU_TM4C123.h"
#include "LCD.h"
#include <string.h>
// General defines
#define SYS_FREQ 16000000UL
// For Timer0B
#define PRESCALE 255UL
#define MILLISEC_IN_SEC 1000
// For ADC
#define ANALOG1 (1UL << 2) // PE2 <-> AIN1
#define ANALOG2 (1UL << 3) // PE3 <-> AIN0
#define SAMPLING_RATE 100 //Sampling rate of 100ms
// LDR used for PE2 freq (darker)
#define MIN_LUX 190
#define MAX_LUX 450
// LDR used for PE3 volume, (lighter)
// This is because LDR is different. If the same then don't need this
#define MIN_LUX_2 1050
#define MAX_LUX_2 3000
// For PWM
static uint16_t pwm_load = 0;
static uint32_t frequency = 800;
static uint16_t volume = 90;
// Happy Birthday notes
static uint32_t notes[] = {261, 293, 329, 349, 392, 440, 494, 523, 0};
static uint32_t hbd_notes[] = {1,1,2,1,4,3,9,1,1,2,1,5,4,9,1,1,8,6,4,3,2,9,8,8,6,4,5,4};
static uint8_t numberOfElements = sizeof(hbd_notes)/sizeof(hbd_notes[0]);
static uint8_t playing_hbd = 0;
static uint16_t pitch_adjust = 0;
// Miscellaneous functions
void turn_on_green_LED(void);
void turn_off_green_LED(void);
void turn_on_red_LED(void);
void turn_off_red_LED(void);
void init_gpio(void);
void init_nvic(void);
void init_timer_0A(void);
//void delay_timer(uint32_t delay_time_ms);
//void init_timer_0B(void);
//void interrupt_timer_0B(uint32_t delay_time_ms);
//void TIMER0B_Handler(void);
// ADC functions
void init_ADC(void);
void ADC0SS3_Handler(void);
void ADC1SS3_Handler(void);
//PWM functions
void set_frequency(uint16_t frequency);
void set_volume(uint8_t volume);
void PWM_init(void);
//Innovation play Happy Birthday Song
void play_hbd(void);
void init_timer_1B(void);
void interrupt_timer_1B(uint32_t delay_time_ms);
void TIMER1B_Handler(void);
void init_pushButton(void);
void GPIOF_Handler(void);
//Innovation choose different instruments
typedef struct {
  char name[10];         // display name (up to 9 chars + NUL)
  uint32_t frequency;    // per-instrument frequency value — semantics set by init_instruments()
} instrument_t;
//Define a list of different instruments;
static instrument_t instruments[5];
static uint32_t instr_count=0;   // number of populated entries in instruments[]
static uint32_t instr_pos=0;     // index of the currently selected instrument
void init_instruments();
void change_instruments();
/* Device has 3 states
** State 0: Normal pitch
** State 1: Happy Birthday Song
** State 2: Changing instrument
*/
static uint8_t state = 0;
// Bring up GPIO, the ADC-triggering timer, PWM audio output, interrupts,
// the push-button and the instrument table, then idle forever — all runtime
// behaviour happens in the interrupt handlers.
int main(void)
{
  init_gpio();
  /* Setup Timers */
  init_timer_0A();
  //init_timer_0B();
  // Default to theremin on startup
  turn_on_red_LED();
  PWM_init();
  init_ADC();
  init_nvic();
  init_pushButton();
  init_instruments();
  while(1)
  {}
}
// On-board LED helpers: PF3 drives the green LED, PF1 the red LED.
void turn_on_green_LED(void)
{
  GPIOF->DATA |= (1UL << 3);
}
void turn_off_green_LED(void)
{
  GPIOF->DATA &= ~(1UL << 3);
}
void turn_on_red_LED(void)
{
  GPIOF->DATA |= (1UL << 1);
}
void turn_off_red_LED(void)
{
  GPIOF->DATA &= ~(1UL << 1);
}
// Configure GPIO: PE4 digital out (speaker), PF1/PF3 digital out (LEDs),
// PE2/PE3 as inputs for the two photoresistor analog channels.
void init_gpio(void)
{
  uint32_t dummy;
  // Enable GPIOE (Speaker) and GPIOF (LED for debug)
  SYSCTL->RCGCGPIO |= ((1UL << 4) | (1UL << 5));
  // Do a dummy read to insert a few cycles after enabling the peripheral.
  dummy = SYSCTL->RCGCGPIO;
  // Sets PE4 for Digital out
  GPIOE->DIR |= (1UL << 4);
  GPIOE->DEN |= (1UL << 4);
  // Sets GPIOF's direction for LED
  GPIOF->DIR |= ((1UL << 3) | (1UL << 1)); // Output for PF3 LED
  GPIOF->DEN |= ((1UL << 3) | (1UL << 1)); //Enabled PF3(LED)
  //Enable Analog functions (disabled digital)
  //GPIOE->AFSEL |= (ANALOG1 | ANALOG2);
  GPIOE->DIR &= ~(ANALOG1|ANALOG2); //Input
  GPIOE->DEN &= ~(ANALOG1|ANALOG2); //Digital disable
  //GPIOE->AMSEL |= (ANALOG1|ANALOG2); // Select Analog In
  GPIOE->PUR |= (ANALOG1|ANALOG2); // Pull up resistor
}
/** Initialize both ADCs: ADC0/SS3 samples AIN1 (PE2, pitch) and ADC1/SS3
 *  samples AIN0 (PE3, volume). Both sequencers are timer-triggered
 *  (EMUX=0x5) and raise an interrupt at end of conversion.
 **/
void init_ADC(void)
{
  uint32_t dummy;
  /* Configure ADC0, SS03 */
  SYSCTL->RCGCADC |= (1UL<<0); //Enable clock to ADC0
  //Wait until ADC0 ready
  dummy = SYSCTL->RCGCADC;
  dummy = SYSCTL->RCGCADC;
  // Hand PE2/PE3 to their analog function now that the ADC clock is up.
  GPIOE->AFSEL |= (ANALOG1 | ANALOG2);
  GPIOE->AMSEL |= (ANALOG1 | ANALOG2);
  ADC0->ACTSS &= ~(1UL<<3); //Disable SS03 while programming
  ADC0->EMUX |= (0x5<<12); //SS03 on timer
  ADC0->SSMUX3 = (0x1<<0); //SS03 sample AIN1
  ADC0->SSCTL3 = ((1UL<<2) | (1UL << 1)); //SS03 trigger interrupt
  ADC0->RIS &= ~(1UL<<3); //Clear interrupt
  ADC0->IM |= (1UL<<3); //Arm interrupt at SS03
  ADC0->ISC &= ~(1UL<<3); //Clear Level-senstive interrupt
  ADC0->ACTSS |= (1UL<<3); //Enable SS03
  /* Configure ADC1, SS03 */
  SYSCTL->RCGCADC |= (1UL << 1); //Enable clock to ADC1
  dummy = SYSCTL->RCGCADC;
  dummy = SYSCTL->RCGCADC;
  ADC1->ACTSS &= ~(1UL << 3); //Disable SS03 while programming
  ADC1->EMUX |= (0x5<<12); //SS03 on timer
  ADC1->SSMUX3 = 0x0; //SS03 sample AIN0
  ADC1->SSCTL3 = ((1UL<<2) | (1UL << 1)); //SS03 trigger interrupt
  ADC1->RIS &= ~(1UL<<3); //Clear interrupt
  ADC1->IM |= (1UL<<3); //Arm interrupt at SS03
  ADC1->ISC &= ~(1UL<<3); //Clear Level-senstive interrupt
  ADC1->ACTSS |= (1UL<<3); //Enable SS03
}
/** ADC0 SS3 interrupt handler — pitch control from the PE2/AIN1 photoresistor.
 *  In song mode the light level picks one of three pitch-adjust steps;
 *  otherwise PWM frequency follows a quadratic lux->Hz mapping clamped to
 *  [MIN_LUX^2/100, MAX_LUX^2/100]. Updates the speaker and acks the interrupt.
 **/
void ADC0SS3_Handler(void)
{
  uint16_t lux1;
  lux1 = ADC0->SSFIFO3; //read lux value of SS03
  if (playing_hbd)
  {
    if(lux1 < MIN_LUX)
      pitch_adjust = 1;
    else if (MIN_LUX <= lux1 && lux1 < 300)  // fix: was magic 190 (== MIN_LUX)
      pitch_adjust = 2;
    else
      pitch_adjust = 3;
  }
  else
  {
    // Quadratic mapping, clamped at both ends of the calibrated lux range.
    if(lux1 < MIN_LUX)
      frequency = (MIN_LUX*MIN_LUX)/100;
    else if (lux1 > MAX_LUX)
      frequency = (MAX_LUX*MAX_LUX)/100;
    else
      frequency = (lux1*lux1)/100;
  }
  set_frequency(frequency);
  set_volume(volume);
  ADC0->ISC |= (1UL << 3);     // acknowledge SS3 at the ADC
  NVIC->ICPR[0] = (1UL << 17); //Clear pending bit in NVIC for IRQ#17 ADC0
}
// Mapped to PE3
/* ADC1/SS3 ISR (second light sensor): maps the lux reading into one of
 * five discrete volume steps (percent duty cycle) and applies it. */
void ADC1SS3_Handler(void)
{
uint16_t lux2 = ADC1->SSFIFO3;
if(lux2 < MIN_LUX_2)
volume = 1;
else if ( MIN_LUX_2 <= lux2 && lux2 < 1200)
volume = 5;
else if ( 1200 <= lux2 && lux2 < 1500)
volume = 10;
else if ( 1500 <= lux2 && lux2 < 2500)
volume = 25;
else
volume = 50;
set_volume(volume);
ADC1->ISC |= (1UL << 3); // clear interrupt at the ADC
NVIC->ICPR[1] = (1UL << 19); //Clear pending bit in NVIC for IRQ#51 ADC1
}
/** Inits NVIC for TIMER0B (IRQ#20) and ADC (IRQ#17) **/
/* Register all interrupt sources with the NVIC:
 * ADC0/SS3 (IRQ 17, prio 2), ADC1/SS3 (IRQ 51, prio 2),
 * TIMER0B (IRQ 20, prio 1), TIMER1B (IRQ 22, prio 1), GPIOF (IRQ 30, prio 5). */
void init_nvic(void)
{
// init for ADC0 SS03
NVIC->IP[17] = (2<<5); // SS03 has Priority 2
NVIC->ICPR[0] = (1UL<<17);// Clear pending bit to be safe
NVIC->ISER[0] = (1UL<<17);// Enable interrupt at NVIC
// init for ADC1 SS03 (IRQ#51 lives in the second ICPR/ISER word, bit 51-32=19)
NVIC->IP[51] = (2<<5); // SS03 has Priority 2
NVIC->ICPR[1] = (1UL<<19);// Clear pending bit to be safe
NVIC->ISER[1] = (1UL<<19);// Enable interrupt at NVIC
/* Init for TIMER0B */
NVIC->IP[20] = (1 << 5); //IRQ#20 for TIMER0B priority 1, see ds pg104
NVIC->ICPR[0] = (1UL << 20); //Clear pending bit to be safe
NVIC->ISER[0] = (1UL << 20); //Enable interrupt at NVIC
/* Init for TIMER1B */
NVIC->IP[22] = (1 << 5); //IRQ#22 for TIMER1B priority 1, see ds pg104
NVIC->ICPR[0] = (1UL << 22); //Clear pending bit to be safe
NVIC->ISER[0] = (1UL << 22); //Enable interrupt at NVIC
/* Init for PF4 (SW1) */
NVIC->IP[30] = (5 << 5); //IRQ#30 for GPIOF, see ds pg104
NVIC->ICPR[0] = (1UL << 30); //Clear pending bit to be safe
NVIC->ISER[0] = (1UL << 30); //Enable interrupt at NVIC
}
/** Configures TIMER0A periodic and counts down
* Used for triggering ADC
**/
/* Configure TIMER0A as a periodic count-down timer whose timeout triggers
 * the ADC sample sequencers (CTL bit 5 = TAOTE, ADC trigger enable). */
void init_timer_0A(void)
{
uint32_t dummy;
SYSCTL->RCGCTIMER |= 1UL << 0; //Enable TIMER0
dummy = SYSCTL->RCGCTIMER; //Dummy read to give TIMER time to respond
TIMER0->CTL &= ~(1UL << 0); //Disable TIMERA in TIMER0 during config
TIMER0->CFG = 0x4UL; //Independent 16-bit timers
TIMER0->TAMR = 0x2UL; //Periodic Timer mode
TIMER0->TAPR = PRESCALE; //Prescale value
TIMER0->CTL |= (1UL << 1); //TASTALL: freeze timer while CPU halted by debugger
TIMER0->CTL |= (1UL << 5); // Allow timer to trigger ADC
TIMER0->CTL &= ~(1UL << 0); //Disable timer while configuring
// Reload = (ticks per prescaled second) * sampling period in ms / 1000
TIMER0->TAILR = ((SYS_FREQ/PRESCALE+1)*SAMPLING_RATE)/MILLISEC_IN_SEC; //Set timer reload to be
TIMER0->CTL |= (1UL << 0); //Enable TIMER0A
return;
}
/** Sets TIMERA as delay timer that delays for the amount in ms specified in input
* Returns only when timer has expired
**/
/*
void delay_timer(uint32_t delay_time_ms)
{
uint32_t timer_reload_val;
uint32_t prescale_output;
//The reload value for the timer is the delay period we would like divided by
//prescaled clock period
prescale_output = SYS_FREQ/(PRESCALE+1);
// VERY IMPORTANT: delay_time_ms/MILLISEC_IN_SEC will give 0, THE ORDER MATTERS HERE.
timer_reload_val = (delay_time_ms*prescale_output)/MILLISEC_IN_SEC;
TIMER0->TAILR = timer_reload_val;
TIMER0->CTL |= (1UL << 0); //Enable TIMER0A
//While timer has not expired yet, Loop forever
while( TIMER0->CTL & 1UL )
{
}
return;
}
*/
/** Configures TIMER0B as periodic count down **/
/** Configures TIMER0B as periodic count down with its timeout interrupt armed.
 * The reload value and enable bit are set later by interrupt_timer_0B(). **/
void init_timer_0B(void)
{
uint32_t dummy;
SYSCTL->RCGCTIMER |= 1UL << 0; //Enable TIMER0
dummy = SYSCTL->RCGCTIMER; //Dummy read to give TIMER time to respond
TIMER0->CTL &= ~(8UL << 0); //Disable TIMERB in TIMER0 during config
TIMER0->CFG = 0x4UL; //Independent 16-bit timers
TIMER0->TBMR = 0x2UL; //Periodic Timer mode
TIMER0->TBPR = PRESCALE; //Prescale value
TIMER0->ICR = (1UL << 8); // Clear TBTRIS
TIMER0->IMR = (1UL << 8); // Interrupt enabled for TIMER0B
TIMER0->CTL |= (1UL << 9); //TBSTALL: freeze timer while CPU halted by debugger
return;
}
/** Sets TIMER0B as periodic timer that interrupts on every expiration.
 * delay_time_ms: period between interrupts, in milliseconds. **/
void interrupt_timer_0B(uint32_t delay_time_ms)
{
uint32_t timer_reload_val;
uint32_t prescale_output;
init_timer_0B();
//The reload value for the timer is the delay period we would like divided by
//prescaled clock period
prescale_output = SYS_FREQ/(PRESCALE+1);
// VERY IMPORTANT: delay_time_ms/MILLISEC_IN_SEC will give 0, THE ORDER MATTERS HERE.
timer_reload_val = (delay_time_ms*prescale_output)/MILLISEC_IN_SEC -1;
TIMER0->CTL &= ~(1UL << 8); //Disable TIMER0B
TIMER0->TBILR = timer_reload_val;
TIMER0->CTL |= (1UL << 8); //Enable TIMER0B
return;
}
/** Handler for TIMER0B interrupt **/
// Currently not used for anything
/* TIMER0B ISR — intentionally a no-op.
 * NOTE(review): this handler clears neither TIMER0->ICR nor the NVIC pending
 * bit; if TIMER0B interrupts are ever enabled (interrupt_timer_0B), the ISR
 * will retrigger immediately — confirm before enabling that timer. */
void TIMER0B_Handler(void)
{
return;
}
/** Sets the frequency output of the PWM signal.
 * frequency: desired tone frequency in Hz; 0 means "silence" (used by
 * TIMER1B_Handler between song repetitions).
 * Side effect: updates the global pwm_load, which set_volume() reads. */
void set_frequency(uint16_t frequency)
{
if (frequency == 0)
{
// BUG FIX: the old code computed SYS_FREQ/frequency with frequency == 0
// (divide by zero). Treat 0 as "mute" by disabling the PWM output.
PWM0->ENABLE &= ~(1UL << 4);
return;
}
PWM0->ENABLE |= (1UL << 4); // re-enable output for audible notes
pwm_load = (SYS_FREQ/frequency)/64 - 1;
PWM0->_2_LOAD = pwm_load;
}
/** Sets the PWM CMPA in order to get the appropriate duty cycle.
 * volume: duty cycle in percent (0..100) of the current pwm_load period.
 * CMPA must never equal LOAD, hence the decrement at the boundary. **/
void set_volume(uint8_t volume)
{
uint16_t ticks_cmpa;
ticks_cmpa = (volume*pwm_load)/100;
if (ticks_cmpa == pwm_load)
ticks_cmpa--;
PWM0->_2_CMPA = ticks_cmpa;
}
/** Initializes PWM signal on PE4 (M0PWM4) for output to the speaker.
 * PWM clock = system clock / 64; initial frequency/volume come from the
 * current values of the globals `frequency` and `volume`. **/
void PWM_init(void)
{
SYSCTL->RCGC0 |= (1UL << 20);
//SYSCTL->RCGC2 |= (1UL << 4); // Cannot configure this AND RCGCGPIO
GPIOE->AFSEL |= (1UL << 4); // Alternate function
GPIOE->PCTL |= (0x4 << 16); // Select M0PWM4
SYSCTL->RCC |= (1 << 20); // Use PWM divider
SYSCTL->RCC |= (0x7 << 17); // Divider set to divide by 64
PWM0->_2_CTL = 0x0UL; // Immediate update to parameters
PWM0->_2_GENA = 0x8CUL; // Drive PWM high when counter matches LOAD, drive low when matches CMPA
set_frequency(frequency);
set_volume(volume);
PWM0->_2_CTL = 0x1UL; // enabled PWM module 0, generator 2
PWM0->ENABLE |= (1UL << 4); // enable PWM module 0
}
/** Plays the happy birthday song: flags song mode and starts TIMER1B with a
 * 500 ms period so TIMER1B_Handler advances one note per tick. **/
void play_hbd(void)
{
playing_hbd = 1;
interrupt_timer_1B(500);
return;
}
/** Configures TIMER1B as periodic count down with its timeout interrupt armed.
 * Reload value and enable bit are set later by interrupt_timer_1B(). **/
void init_timer_1B(void)
{
uint32_t dummy;
SYSCTL->RCGCTIMER |= 1UL << 1; //Enable TIMER1
dummy = SYSCTL->RCGCTIMER; //Dummy read to give TIMER time to respond
TIMER1->CTL &= ~(8UL << 0); //Disable TIMERB in TIMER1 during config
TIMER1->CFG = 0x4UL; //Independent 16-bit timers
TIMER1->TBMR = 0x2UL; //Periodic Timer mode
TIMER1->TBPR = PRESCALE; //Prescale value
TIMER1->ICR = (1UL << 8); // Clear TBTRIS
TIMER1->IMR = (1UL << 8); // Interrupt enabled for TIMER1B
TIMER1->CTL |= (1UL << 9); //TBSTALL: freeze timer while CPU halted by debugger
return;
}
/** Sets TIMER1B as periodic timer that interrupts on every expiration.
 * delay_time_ms: period between interrupts, in milliseconds. **/
void interrupt_timer_1B(uint32_t delay_time_ms)
{
uint32_t timer_reload_val;
uint32_t prescale_output;
init_timer_1B();
//The reload value for the timer is the delay period we would like divided by
//prescaled clock period
prescale_output = SYS_FREQ/(PRESCALE+1);
// VERY IMPORTANT: delay_time_ms/MILLISEC_IN_SEC will give 0, THE ORDER MATTERS HERE.
timer_reload_val = (delay_time_ms*prescale_output)/MILLISEC_IN_SEC -1;
TIMER1->CTL &= ~(1UL << 8); //Disable TIMER1B
TIMER1->TBILR = timer_reload_val;
TIMER1->CTL |= (1UL << 8); //Enable TIMER1B
return;
}
/** Handler for TIMER1B interrupt.
 * Advances the happy-birthday melody one note per tick: looks up the next
 * note index in hbd_notes[], scales its base frequency by pitch_adjust,
 * and loops back to the start (with one silent tick) after the last note.
 * Stops and disables the timer if song mode was switched off.
 **/
void TIMER1B_Handler(void)
{
uint32_t note;
static uint32_t current_note = 0; // position within hbd_notes[], persists across ticks
// If we stopped wanting to play hbd, then disable TIMER1B and reset current note to 0
if (!playing_hbd)
{
TIMER1->CTL &= ~(1UL << 8); //Disable TIMER1B if not playing hbd
TIMER1->ICR = (1UL << 8); // Clear interrupt at GPTM to de-assert IRQ#22 signal
NVIC->ICPR[0] = (1UL << 22); //Clear pending bit in NVIC for IRQ#22 TIMER1B
current_note = 0;
return;
}
// If happy birthday has not finished playing, advance to next note
if (current_note < numberOfElements)
{
note = hbd_notes[current_note++]-1; // hbd_notes entries are 1-based indices into notes[]
frequency = notes[note]*pitch_adjust;
}
else
{
// turn off for one, then restart the song
frequency = 0;
current_note = 0;
}
set_frequency(frequency);
set_volume(volume);
TIMER1->ICR = (1UL << 8); // Clear interrupt at GPTM to de-assert IRQ#22 signal
NVIC->ICPR[0] = (1UL << 22); //Clear pending bit in NVIC for IRQ#22 TIMER1B
}
/* Configure the two LaunchPad push buttons as rising-edge interrupt sources:
 * PF4 (SW1) and PF0 (SW2). PF0 is NMI-locked on the TM4C123, so it must be
 * unlocked via GPIOLOCK/GPIOCR before it can be reconfigured. */
void init_pushButton(void)
{
/* Setting up PF4 (SW1) */
GPIOF->DIR &= ~(1UL << 4); // Set PF4 as input
GPIOF->DEN |= (1UL << 4); // Digital enable PF4
GPIOF->IM &= ~(1UL << 4); // Mask PF4 for now while configuring
GPIOF->IS &= ~(1UL << 4); // PF4 Edge sensitive
GPIOF->IBE &= ~(1UL << 4); // Interrupt generation controlled by IEV
GPIOF->IEV |= (1UL << 4); // Interrupt generated on rising edge
GPIOF->PUR |= (1UL << 4); // Weak pull up resistor for PF4
GPIOF->ICR |= (1UL << 4); // Clear PF4 Interrupt
GPIOF->IM |= (1UL << 4); // Enable Interrupts for PF4
/* Setting up PF0 (SW2). Requires more code due to lock */
GPIOF->IM &= ~(1UL << 0); // Mask PF0 for now while configuring
GPIOF->LOCK = 0x4C4F434B; // Enables write access to GPIOCR
*((uint32_t *) &GPIOF->CR) = 0x1F; // commit changes for PF0-PF4
GPIOF->DIR &= ~(1UL << 0); // Set PF0 as input
GPIOF->DEN |= (1UL << 0); // Digital enable PF0
GPIOF->AMSEL &= ~(1UL << 0); // no analog function
GPIOF->PCTL &= ~(1UL << 0); // plain GPIO
GPIOF->AFSEL &= ~(1UL << 0); // no alternate function
GPIOF->IS &= ~(1UL << 0); // edge sensitive
GPIOF->IBE &= ~(1UL << 0); // single edge, controlled by IEV
GPIOF->IEV |= (1UL << 0); // rising edge
GPIOF->PUR |= (1UL << 0); // weak pull-up
GPIOF->ICR |= (1UL << 0); // clear any stale interrupt
GPIOF->IM |= (1UL << 0); // unmask PF0
}
/* GPIO Port F ISR (SW1 on PF4, SW2 on PF0).
 * SW1 cycles the three operating modes: 0 = theremin (red LED),
 * 1 = happy-birthday song (green LED), 2 = instrument selection (LEDs off).
 * SW2 steps to the next instrument while in mode 2. */
void GPIOF_Handler(void)
{
// If Interrupt caused by PF4 (SW1)
if(GPIOF->RIS & (1UL << 4))
{
if (state<2) state++; // move to the next states
else state=0;
switch(state){
case 0: // normal operation
turn_off_green_LED();
turn_on_red_LED();
playing_hbd = 0;
break;
case 1: // happy birthday song
play_hbd();
turn_on_green_LED();
turn_off_red_LED();
playing_hbd = 1;
break;
case 2: // Changing instruments
turn_off_green_LED();
turn_off_red_LED();
playing_hbd = 0;
change_instruments();
break;
default:
turn_off_green_LED();
turn_on_red_LED();
break;
}
GPIOF->ICR |= (1UL << 4); // Clear Interrupt
}
// Else if Interrupt caused by PF0 (SW2)
else if (GPIOF->RIS & (1UL << 0))
{
if (state==2)
{
change_instruments();
}
// BUG FIX: the PF0 interrupt was never acknowledged, so the handler
// re-entered forever after the first SW2 press.
GPIOF->ICR |= (1UL << 0); // Clear Interrupt
}
}
/* Populate the instrument table (name + base frequency) and reset the
 * current-instrument cursor used by change_instruments(). */
void init_instruments()
{
//init a list of instruments
instr_count = 4;
instr_pos = 0;
// BUG FIX: every strcpy previously wrote into instruments[0].name, so
// slots 1..3 had no name. Copy each name into its own slot.
strcpy(instruments[0].name, "Bass");
instruments[0].frequency = 55;
strcpy(instruments[1].name, "Tenor");
instruments[1].frequency = 110;
strcpy(instruments[2].name, "Alto");
instruments[2].frequency = 125;
strcpy(instruments[3].name, "Soprano");
instruments[3].frequency = 250;
//init the LCD module
//LCD_init();
}
/* Apply the current instrument's base frequency, print its name to the LCD
 * (shifting the display once the text runs past the 16-char line), then
 * advance the cursor to the next instrument, wrapping around the table. */
void change_instruments()
{
uint32_t i;
frequency = instruments[instr_pos].frequency;
//display the instruments name on the LCD
for(i=0; instruments[instr_pos].name[i] != '\0'; i++)
{
send_data(instruments[instr_pos].name[i]);
//delay_timer(100);
// Since screen has enough to display 16 chars per line, if going off line, start shifting screen.
if ( i >= 14 )
send_command(0x18);
}
// BUG FIX: wrap at the last valid index. The old test (instr_pos <
// instr_count) let instr_pos reach instr_count, so the next call indexed
// one element past the end of the instruments table.
if (instr_pos < instr_count - 1) instr_pos++;
else instr_pos = 0;
}
<file_sep>#include "CU_TM4C123.h"
/* IR remote transmitter: bit-bangs an NEC-style frame by gating a 38 kHz
 * PWM carrier on and off from a timer-driven finite state machine. */
#define SYS_CLK 16000UL // 16 Mhz reduced by 1000
#define PWM_FREQ 38UL // 38 Khz reduced by 1000
#define PRESCALE 199UL //Clock prescale value
#define us 1000UL // microsecond reduced by 1000
void static init_GPIO(void);// initilize GPIO pins
void static init_PWM(void);//a PWM used to generate 38 kHz square wave
void static enable_PWM(void);// enable PWM output
void static disable_PWM(void);// disable PWM output
void static init_NVIC(void); // set up the NVIC for interrupts
void static init_timer(void);//a timer used to enable/disable the PWM
void static set_timer(void);//set the count down value for the timer
void static init_frame(uint16_t addr, uint8_t cmd); //initilize the frame before tx
uint32_t frame; // 32-bit frame: addr[31:16] | command[15:8] | ~command[7:0]
int frame_index = 32;//point to current value in the frame
uint8_t end_of_frame = 0; //indicate end of frame
uint8_t nextbit; // the next bit needed to be transmited
uint8_t isr_count = 0; //count number of ISR entered
uint8_t state = 0; //state of the FSM
//Define states of the FSM
#define start_a 0
#define start_b 1
#define send0_a 2
#define send0_b 3
#define send1_a 4
#define send1_b 5
#define stop 6
#define end 7
//Define function acting for each state
void static f_start_a(void);
void static f_start_b(void);
void static f_send0_a(void);
void static f_send0_b(void);
void static f_send1_a(void);
void static f_send1_b(void);
void static f_stop(void);
void static f_end(void);
/* Entry point: configure the peripherals, build the frame for the "on"
 * command, and kick the FSM by starting the one-shot timer. All further
 * work happens in TIMER0A_Handler. */
int main(void)
{
uint16_t addr= 0xF7; /*Address: 0000000011110111*/
uint8_t cmd_on= 0xC0; /* On command: 11000000 */
init_GPIO();
init_PWM();
init_NVIC();
init_timer();
init_frame(addr,cmd_on);
//trigger the FSM by setting the timer
set_timer();
//wait
while (1) {}
}
/* Init GPIO pins
** PWM -> PE4 -> IR transmitter
**
*/
void static init_GPIO(void)
{
int dummy;
//Enable GPIOE
SYSCTL->RCGCGPIO |= (1UL << 4);
//Do a dummy read to insert a few cycles after enabling the peripheral.
dummy = SYSCTL->RCGCGPIO;
//Sets PE4 for digital out
GPIOE->DIR |= (1UL << 4);
GPIOE->DEN |= (1UL << 4);
}
/* Configure PWM module 0, generator 2 (M0PWM4 on PE4) to produce the
 * 38 kHz IR carrier at 50% duty cycle. The output itself stays gated by
 * enable_PWM()/disable_PWM(). */
void static init_PWM()
{
//Configurate the PWM module 0
SYSCTL->RCGC0 |= (1UL << 20);
GPIOE->AFSEL |= (1UL << 4);// Alternate function
GPIOE->PCTL |= (0x4 << 16);// Select M0PWM4
SYSCTL->RCC |= (1 << 20);// Use PWM divider
SYSCTL->RCC |= (0x0 << 17);// PWMDIV field 0 = divide clock by 2
PWM0->_2_LOAD = SYS_CLK/2/PWM_FREQ - 1; // Set PWM frequency to 38 Khz
// BUG FIX: the compare value must be derived from this generator's own
// LOAD register (_2_LOAD); the old code read _0_LOAD, which belongs to
// generator 0 and may hold anything.
PWM0->_2_CMPA = PWM0->_2_LOAD/2; // toggle at half period to get 50% duty cycle
PWM0->_2_CTL = 0x0UL;// Immediate update to parameters
PWM0->_2_GENA = 0x8CUL;//Drive PWM high when counter matches LOAD, drive low when matches CMPA
PWM0->_2_CTL = 0x1UL;// enabled PWM module 0, generator 2
}
/* Enable the PWM output (carrier on).
** Gates the already-configured 38 kHz carrier onto PE4.
*/
void static enable_PWM()
{
PWM0->ENABLE |= (1UL << 4);// enable PWM module 0 output 4
}
/* Disable the PWM output (carrier off); the generator keeps running.
*/
void static disable_PWM()
{
PWM0->ENABLE &= ~(1UL << 4);// disable PWM module 0 output 4
}
/* Set up the NVIC
** Register TIMER0A interrupt (IRQ#19) with priority 1.
**
*/
void init_NVIC()
{
//Set up priority = 1 for TIMER0
NVIC->IP[19] = 1<<5; //TIMER0 is IRQ 19
//Clear pending bit for TIMER0
NVIC->ICPR[0] = 1UL<<19;
//Enable interrupt for TIMER0 at NVIC
NVIC->ISER[0] = 1UL<<19;
}
/* Init the Timer 0A
** Timer used to enable/disable PWM
** ... in a certain period
** Timer0A reload value is in us
** Timer0A is one-shot countdown timer
*/
void init_timer()
{
SYSCTL->RCGCTIMER |= (1UL<<0); //Enable TIMER0
TIMER0->CTL &= ~(1UL<<0); //disable TIMER0A while configuring
TIMER0->CFG = 0x4UL; // independent 16-bit timers
TIMER0->TAMR = 0x1UL; // TIMER0A is one-shot countdown timer
TIMER0->TAPR = PRESCALE; // set CLK prescale value
TIMER0->ICR = 1UL; // clear time-out interrupt
// NOTE(review): bit 9 is TBSTALL (timer B); for TIMER0A the stall bit is
// bit 1 (TASTALL) — confirm which timer should stall on debug halt.
TIMER0->CTL |= (1UL << 9); // TBSTALL
}
/* Set up count_down value for timer
** and start count down (one-shot).
** With PRESCALE=199 at 16 MHz one tick is 12.5 us, so a reload of 44
** gives 45 ticks = 562.5 us, the NEC protocol's basic time unit.
*/
void set_timer()
{
TIMER0->CTL &= ~(1UL<<0); //disable TIMER0A while configuring
TIMER0->TAILR = 44; // 44 = 562.5 us
TIMER0->IMR |= 1UL; //arm timer interrupt
TIMER0->CTL |= (1UL<<0);// enable TIMER0A
}
/* Init the frame before Tx
** concatenate the addr and command and inverse command
** to create a 32 bit frame: addr[31:16] | command[15:8] | ~command[7:0].
** Also resets frame_index to 32 (bits are consumed MSB-first).
*/
void init_frame(uint16_t addr, uint8_t command)
{
uint8_t command_inverse;
frame_index = 32;
command_inverse = ~command; // truncated to 8 bits by the uint8_t assignment
frame = (addr<<16|command<<8|command_inverse);
}
/* TIMER0A interrupt handler — the heart of the transmitter FSM.
** Each tick is one 562.5 us slot (see set_timer). States:
**   start_a x16 : leader burst (carrier on, 9 ms)
**   start_b x8  : leader space (carrier off, 4.5 ms)
**   send0_a/b   : logical 0 = 1 slot on, 1 slot off
**   send1_a/b   : logical 1 = 1 slot on, 3 slots off
**   stop/end    : trailing burst, then carrier off for good
** Re-arms the one-shot timer at the end of every tick.
*/
void TIMER0A_Handler(void)
{
//clear interrupt at GPTM
TIMER0->ICR = 1UL;
//clear pending bit in NVIC
NVIC->ICPR[0] = 1UL<<19;
//Enter state machine
switch (state){
case start_a: /* State start_a */
f_start_a();
if (isr_count == 16)
{
isr_count = 0;
state = start_b;
}
else state = start_a; // this state repeat in 16 ISR counts
break;
case start_b: /* State start_b */
f_start_b();
if (isr_count == 8)
{
isr_count = 0;
if (nextbit == 0) state = send0_a;
else state = send1_a;
}
else state = start_b;// this state repreat in 8 ISR counts
break;
case send0_a: /* State send0_a */
f_send0_a();
state = send0_b;
break;
case send0_b: /* State send0_b */
f_send0_b();
if (end_of_frame) state = stop;
else if (nextbit == 0) state = send0_a;
else state = send1_a;
break;
case send1_a: /* State send1_a */
f_send1_a();
state = send1_b;
break;
case send1_b: /* State send1_b */
f_send1_b();
if (isr_count == 3)
{
isr_count = 0;
if (end_of_frame) state = stop;
else if (nextbit == 0) state = send0_a;
else state = send1_a;
}
else state = send1_b;// this state repeat in 3 ISR counts
break;
case stop: /* State stop */
f_stop();
state = end;
break;
case end: /* State end */
f_end();
break;
default:
state = end;
break;
}
set_timer();
}
/* Start_a state: leader burst.
** Enable PWM (carrier on)
** Increase ISR count
**
*/
void static f_start_a()
{
//Turn on PWM
enable_PWM();
//Increase ISR count
isr_count++;
}
/* Start_b state: leader space.
** Disable PWM (carrier off)
** Increase ISR count; on the last space slot, consume the first data bit.
**
*/
void static f_start_b()
{
//Turn off PWM
disable_PWM();
//Increase ISR count
isr_count++;
//Decrease frame_index if permit
if (isr_count == 8) frame_index--;
//determine the bit to trasmit (MSB first)
if (frame_index < 0) end_of_frame = 1;
else nextbit = (frame & (1UL<<frame_index))>>frame_index;
}
/* Send0_a state: carrier burst half of a logical 0.
** Enable PWM
**
*/
void static f_send0_a()
{
//Turn on PWM
enable_PWM();
}
/* Send0_b state: single-slot space ending a logical 0.
** Disable PWM
** Decrease frame_index and fetch the next bit.
**
*/
void static f_send0_b()
{
//Turn off PWM
disable_PWM();
//Decrease frame_index
frame_index--;
//Determine the bit to trasmit
if (frame_index < 0) end_of_frame = 1;
else nextbit = (frame & (1UL<<frame_index))>>frame_index;
}
/* send1_a state: carrier burst half of a logical 1.
** Enable PWM
**
*/
void static f_send1_a()
{
//Turn on PWM
enable_PWM();
}
/* send1_b state: three-slot space ending a logical 1.
** Disable PWM
** Increase ISR count
** Decrease frame_index when the third slot completes (ISR count = 3)
**
*/
void static f_send1_b()
{
//Turn off PWM
disable_PWM();
//Increase ISR count
isr_count++;
// Decrease frame_index if permit
if (isr_count == 3) frame_index--;
//determine the bit to trasmit
if (frame_index < 0) end_of_frame = 1;
else nextbit = (frame & (1UL<<frame_index))>>frame_index;
}
/* stop state: trailing carrier burst that terminates the frame.
** Enable PWM
**
*/
void static f_stop()
{
//Turn on PWM
enable_PWM();
}
/* end state: transmission finished; leave the carrier off.
** Disable PWM
**
*/
void static f_end()
{
//Turn off PWM
disable_PWM();
}
<file_sep>#include "CU_TM4C123.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define BUFFER_SIZE 10// 10 bytes in each buffer
#define POOL_SIZE 20 // Pool has 20 buffers
// BUG FIX: removed the stray trailing ';' from this definition so PRESCALE
// can be used safely inside arbitrary expressions.
#define PRESCALE 255UL //Clock prescale value
/* Define a buffer and buffer pool */
typedef char buffer_t[BUFFER_SIZE]; // define buffer
typedef struct{
buffer_t pool[POOL_SIZE]; // a pool of buffers
uint8_t head;
uint8_t tail;
} queue_t;
/* Init functions */
void static UART_Init(void);//Initialize the UART
void static Timer_Init(void);//Initialize the Timer
void static NVIC_Init(void);//Initialize the NVIC
/* Methods to deal with the buffer queue*/
void queue_Init(queue_t* queue);//initialize the queue before using
int is_queue_full(queue_t* queue);//indicate if the queue is full
// BUG FIX: this prototype previously re-declared is_queue_full; it must
// declare is_queue_empty, which is defined and used below.
int is_queue_empty(queue_t* queue);//indicate if the queue is empty
buffer_t *get_buffer(queue_t* queue); // return a pointer to the buffer from the queue
int release_buffer(queue_t *queue, buffer_t* out_buffer);//release a buffer back to queue
/* Methods to deal with the UART */
void Tx_message(char *msg_buffer, uint32_t msg_size);//will accept the msg for Tx'ing
void Rx_message(buffer_t *buffer);//return a pointer to an Rx msg
void start_UART();// Kick-start the UART TX at the beginning
/* Methods to deal with the Timer */
void start_timer(void);
/* Methods to deal with the Clock */
void set_Clock(char *val);//set the clock to value format THH:MM
void Create_Clock_Display(char *clock_display);//create a clock display
void Increase_Clock(void); //Increase the clock by 1 second
void faster_Clock(void);//faster the clock by 2
void slower_Clock(void);//slower the clock by 2
void pause_Clock(void);//pause the clock
void continue_Clock(void);//continue the clock
/* Misc */
void static stop_execution(void);//stop execution right away
void UART_OutChar(unsigned char data);
/* ------------ GLOBAL Variables ------------- */
queue_t tx_queue, rx_queue; // define a tx_queue, rx_queue
uint8_t hours, mins, secs; //hold the clock variables
uint8_t busy_input = 0; //indicate if user is inputting
/* ------------ queue_Init() ------------------
** Reset a message queue to its empty state (head == tail == 0).
*/
void queue_Init(queue_t* queue)
{
    queue->tail = 0;
    queue->head = 0;
}
/* ------------ is_queue_full() -----------------
** A circular queue is full when advancing the tail by one slot would
** collide with the head.
** Return 1 if full, 0 otherwise.
*/
int is_queue_full(queue_t* queue)
{
    uint8_t next_tail = (queue->tail + 1) % POOL_SIZE;
    return (next_tail == queue->head) ? 1 : 0;
}
/* ------------ is_queue_empty() -----------------
** A circular queue is empty when head and tail coincide.
** Return 1 if empty, 0 otherwise.
*/
int is_queue_empty(queue_t* queue)
{
    return (queue->head == queue->tail) ? 1 : 0;
}
/* ------------ get_buffer() --------------------
** This method return an available buffer from the queue.
** Input: pointer to the queue
** Output: Pointer to a buffer allocated from the queue, or NULL after
** stop_execution() if the pool is exhausted (treated as a fatal error).
** Caller must mask interrupts around this call (see Tx_message).
*/
buffer_t *get_buffer(queue_t* queue)
{
buffer_t* buff;
if (!is_queue_full(queue)) //if queue is not full yet
{
buff = &(queue->pool[queue->tail]);
queue->tail = (queue->tail + 1)%POOL_SIZE;
return buff;
}
else
{
stop_execution(); // never returns: pool exhaustion is fatal
return NULL;
}
}
/* ------------ release_buffer() -------------------
** This method pops the oldest buffer off the queue: its contents are
** copied into out_buffer and the pool slot is zeroed and recycled.
** Input: pointer to the queue, destination buffer
** Output: Return 1 on success, 0 if the queue was empty (out_buffer is
** left untouched in that case).
*/
int release_buffer(queue_t *queue, buffer_t* out_buffer)
{
buffer_t *buffer= NULL;
if (!is_queue_empty(queue)) //if queue is not empty (just in case)
{
buffer = &(queue -> pool[queue ->head]);
queue ->head = (queue ->head + 1)%POOL_SIZE;
memcpy(out_buffer,buffer,BUFFER_SIZE);
//Clean the buffer
memset(buffer,0x00,BUFFER_SIZE);
return 1;
}
return 0;
}
/* ------------ Tx_message -------------------------
** Queue msg_buffer for transmission by splitting it into NUL-terminated
** pool buffers (up to BUFFER_SIZE-1 payload bytes each) and unmasking the
** UART TX interrupt so UART0_Handler drains the queue.
** Input: pointer to msg_buffer, size of msg buffer in bytes.
** Output: None
*/
void Tx_message(char *msg_buffer, uint32_t msg_size)
{
buffer_t *buff;
uint32_t i=0;
uint8_t n;
while (i<msg_size)
{
__set_PRIMASK(1);//disable all interrupts around queue manipulation
buff = get_buffer(&tx_queue); //get a buffer from the tx_queue
__set_PRIMASK(0);//enable intterrupts
// BUG FIX: cap the chunk at BUFFER_SIZE-1 so the terminator below stays
// inside the buffer. The old code allowed n == BUFFER_SIZE and wrote the
// terminator at index n+1, one or two bytes past the end of the buffer.
n = (msg_size-i > BUFFER_SIZE-1) ? (BUFFER_SIZE-1) : (msg_size-i);
strncpy((char*)buff,&msg_buffer[i],n);//truncate the msg_buffer
(*buff)[n]='\0'; // terminate right after the payload (was [n+1])
i += n;
//Unmask Tx interrupt at UART
UART0 -> IM |= (UART_IM_TXIM);
}
}
/* ------------ Rx_message --------------------
** Copy the oldest received message (if any) out of the rx_queue.
** Input: destination buffer
** Output: none. NOTE(review): when the rx_queue is empty, release_buffer
** returns 0 and *buffer is left untouched — callers must not assume the
** buffer was written.
*/
void Rx_message(buffer_t *buffer)
{
__set_PRIMASK(1);//disable all interrupts around queue manipulation
release_buffer(&rx_queue,buffer); //release a buffer from the rx_queue
__set_PRIMASK(0);//enable intterrupts
}
/* ------------ UART_Init ---------------------
** Initialize the UART for 115,200 baud rate (assuming 16 MHz UART clock),
** 8 bit word length, no parity bits, one stop bit, FIFOs enabled.
** Pins PA0 (U0RX) / PA1 (U0TX); RX-timeout and TX interrupts armed.
** Input: none
** Output: none
*/
void UART_Init(void){
SYSCTL -> RCGCUART |= SYSCTL_RCGCUART_R0; // activate UART0
SYSCTL -> RCGCGPIO |= SYSCTL_RCGCGPIO_R0; // activate port A
UART0 -> CTL &= ~UART_CTL_UARTEN; // disable UART
//UART0 -> CTL |= UART_CTL_EOT; //End of Tranmision
UART0 -> IBRD = 8; // IBRD = int(16,000,000 / (16 * 115,200)) = int(8.680555) = 8
UART0 ->FBRD = 44; // FBRD = int(0.680555 * 64 + 0.5) = 44
// 8 bit word length (no parity bits, one stop bit, enable FIFOs)
UART0 -> LCRH |= (UART_LCRH_WLEN_8|UART_LCRH_FEN);
GPIOA -> AMSEL &= ~0x03; // disable analog functionality on PA
GPIOA -> AFSEL |= 0x03; // enable alt funct on PA1-0
GPIOA -> DEN |= 0x03; // enable digital I/O on PA1-0
// configure PA1-0 as UART (just in case)
GPIOA -> PCTL = (GPIOA -> PCTL & 0xFFFFFF00)+0x00000011;
UART0 -> ICR |= (UART_ICR_TXIC | UART_ICR_RTIC); // clear interrupt
UART0 -> IM |= (UART_IM_RTIM|UART_IM_TXIM) ; //arm interrupt for RX and TX
UART0 -> CTL |= UART_CTL_UARTEN; // enable UART
}
/* ------------- Start UART ---------------------
** The UART TX needs to be kick-started by filling up the hardware FIFO;
** the TX interrupt only fires afterwards as the FIFO drains.
** NOTE(review): sizeof(s) includes the string's NUL terminator, so a
** trailing 0x00 byte is transmitted too — consistent with the rest of the
** protocol, which ships NUL-padded fixed-size buffers.
*/
void start_UART()
{
char s[]="00:00:00\r\n";
int i;
for (i=0;i<sizeof(s);i++)
UART0->DR = s[i];
}
/* ------------ NVIC_Init -----------
** Initialize the NVIC.
** Registers: TIMER0A (IRQ 19, prio 1), TIMER0B (IRQ 20, prio 3),
** UART0 (IRQ 5, prio 2), GPIOF push buttons (IRQ 30, prio 4).
** Input: None
** Output: None
*/
void NVIC_Init(void)
{
/* TIMER0A */
//Set up priority = 1 for TIMER0A
NVIC -> IP[19] = 1<<5; //TIMER0A is IRQ 19
//Clear pending bit for TIMER0A
NVIC -> ICPR[0] = 1UL<<19;
//Enable interrupt for TIMER0A at NVIC
NVIC -> ISER[0] = 1UL<<19;
/* TIMER0B */
//Set up priority = 3 for TIMER0B
NVIC -> IP[20] = 3<<5; //TIMER0B is IRQ 20
//Clear pending bit for TIMER0B
NVIC -> ICPR[0] = 1UL<<20;
//Enable interrupt for TIMER0B at NVIC
NVIC -> ISER[0] = 1UL<<20;
/* UART0 */
//Set up priority = 2 for UART0
NVIC -> IP[5] = 2<<5; //UART0 is IRQ 5
//Clear pending bit for UART0
NVIC -> ICPR[0] = 1UL<<5;
//Enable interrupt for UART0 at NVIC
NVIC -> ISER[0] = 1UL<<5;
/* Push Button */
/* Init for PF4 (SW1) and PF0 (SW2) */
NVIC->IP[30] = (4 << 5); //IRQ#30 for GPIOF, see ds pg104
NVIC->ICPR[0] = (1UL << 30); //Clear pending bit to be safe
NVIC->ISER[0] = (1UL << 30); //Enable interrupt at NVIC
}
/* ------------- GPIO_Init --------------------
** Initialize the GPIO used in this lab:
** PF3 as digital output for the error LED (see stop_execution),
** PF4 (SW1) as a rising-edge interrupt source with pull-up.
*/
void GPIO_Init(void)
{
uint32_t ulLoop;
// Set up to use LED
// Enable GPIOF port (used for the on-board LED).
SYSCTL->RCGCGPIO |= (1UL << 5);
// Do a dummy read to insert a few cycles after enabling the peripheral.
ulLoop = SYSCTL->RCGCGPIO;
// Enable the GPIO pin for the LED (PF3).
GPIOF->DIR |= 0x08; // Set the direction as output
GPIOF->DEN |= 0x08; // Enable the GPIO pin for digital function
// Init the Push Button
/* Setting up PF4 (SW1) */
GPIOF->DIR &= ~(1UL << 4); // Set PF4 as input
GPIOF->DEN |= (1UL << 4); // Digital enable PF4
GPIOF->IM &= ~(1UL << 4); // Mask PF4 for now while configuring
GPIOF->IS &= ~(1UL << 4); // PF4 Edge sensitive
GPIOF->IBE &= ~(1UL << 4); // Interrupt generation controlled by IEV
GPIOF->IEV |= (1UL << 4); // Interrupt generated on rising edge
GPIOF->PUR |= (1UL << 4); // Weak pull up resistor for PF4
GPIOF->ICR |= (1UL << 4); // Clear PF4 Interrupt
GPIOF->IM |= (1UL << 4); // Enable Interrupts for PF4
}
/* ------------- Timer_Init() -------------------
** Initialize the Timer used in this lab.
** Use Timer0A for the wall clock tick.
** Use Timer0B for polling the RX message queue.
** Both are independent 16-bit periodic count-down timers with PRESCALE.
*/
void Timer_Init(void)
{
SYSCTL->RCGCTIMER |= (1UL<<0); //Enable TIMER0
TIMER0->CTL &= ~(TIMER_CTL_TAEN); //disable TIMER0A while configuring
TIMER0->CFG = 0x4UL; // independent 16-bit timers
TIMER0->TAMR = 0x2UL; // TIMER0A count-down, periodic
TIMER0->TAPR = PRESCALE; // set CLK prescale value
TIMER0->CTL &= ~(TIMER_CTL_TBEN); // disable TIMER0B while configuring
TIMER0->TBMR = 0x2UL; // TIMER0B count-down, periodic
TIMER0->TBPR = PRESCALE; // set CLK prescale value
TIMER0->ICR |= (TIMER_ICR_TATOCINT | TIMER_ICR_TBTOCINT); // clear time-out interrupt
TIMER0->CTL |= (1UL << 9); // TBSTALL: freeze timer B while halted by debugger
}
/* ------------- start_timer() ------------------------
** Start both periodic timers and arm their timeout interrupts.
** Timer0A initially generates an interrupt every second (clock tick).
** Timer0B polls the RX queue. NOTE(review): with PRESCALE=255 at 16 MHz a
** tick is 16 us, so TBILR=16000 gives ~256 ms, not the commented 200 ms —
** confirm the intended polling period.
*/
void start_timer()
{
TIMER0->TAILR = 62499; // 1 secs
TIMER0->TBILR = 16000; // 200 ms
TIMER0->IMR |= (TIMER_IMR_TATOIM | TIMER_IMR_TBTOIM); //arm timer interrupt
TIMER0->CTL |= (TIMER_CTL_TAEN | TIMER_CTL_TBEN);// enable TIMER0A and TIMER0B
}
/* ------------- faster_Clock() -----------------------
** Double the clock speed by halving the timer reload value.
** NOTE(review): repeated calls eventually drive TAILR to 0, which stops
** meaningful ticking — consider a lower bound.
*/
void faster_Clock()
{
TIMER0->CTL &= ~(1UL<<0); //disable TIMER0A while configuring
TIMER0->TAILR = TIMER0 -> TAILR/2; // decrease value by 2
TIMER0->CTL |= (1UL<<0);// enable TIMER0A
}
/* ------------- slower_Clock() -----------------------
** Halve the clock speed by doubling the timer reload value.
** NOTE(review): in 16-bit timer mode values above 65535 will not fit in
** the counter — repeated calls may overflow the usable range.
*/
void slower_Clock()
{
TIMER0->CTL &= ~(1UL<<0); //disable TIMER0A while configuring
TIMER0->TAILR = TIMER0 -> TAILR*2; // increase value by 2
TIMER0->CTL |= (1UL<<0);// enable TIMER0A
}
/* ------------- set_Clock() --------------------
** Set the clock from a command of the form "THH:MM"
** (e.g. "T09:30" -> 09:30:00) and echo the new time over the UART.
*/
void set_Clock(char *val)
{
char buff[10];
// BUG FIX: `sub` was char[2], so strncpy left it without a NUL terminator
// and atoi() read past the end of the array (undefined behavior). Use a
// 3-byte buffer and terminate explicitly.
char sub[3];
strncpy(sub,&val[1],2);//extract the HH
sub[2] = '\0';
hours = atoi(sub);
strncpy(sub,&val[4],2);//extract the MM
sub[2] = '\0';
mins = atoi(sub);
secs = 0;
Create_Clock_Display(buff);//generate clock HH:MM:SS
Tx_message(buff,sizeof(buff));//transmit the buffer
}
/* ------------- pause_Clock() -----------------------
** Pause the Clock by disabling the 1-second tick timer (TIMER0A).
*/
void pause_Clock()
{
TIMER0->CTL &= ~(TIMER_CTL_TAEN); //disable TIMER0A
}
/* ------------- continue_Clock() -----------------------
** Resume the Clock by re-enabling the 1-second tick timer (TIMER0A).
*/
void continue_Clock()
{
TIMER0->CTL |= (TIMER_CTL_TAEN); //enable TIMER0A
}
/* ------------- Create_Clock_Display-----------
** Create a string representation of the clock.
** Display format: "HH:MM:SS\r\n" — 10 characters plus the NUL terminator,
** i.e. 11 bytes total.
** NOTE(review): callers pass char[10] buffers, so sprintf writes one byte
** past the end — the destination buffers should be at least char[11].
*/
void Create_Clock_Display(char *clock_display)
{
sprintf(clock_display, "%02d:%02d:%02d\r\n",hours,mins,secs);
}
/* ------------- Increase_Clock ----------------
** Increase the clock by 1 sec, cascading the rollovers
** secs -> mins -> hours; hours wrap at 12 (12-hour clock).
** NOTE(review): the exact `== 60` / `== 12` comparisons assume the globals
** are already in range; out-of-range values set via set_Clock are only
** normalized once they happen to hit the boundary — confirm intent.
*/
void Increase_Clock()
{
secs++;//increase clock by 1 sec
if (secs==60)
{
secs = 0;
mins++;
}
if (mins==60)
{
mins = 0;
hours++;
}
if (hours==12)
{
hours = 0;
}
}
/* ------------- Timer0A_Handler ---------------
** Handles the interrupt from timer0A (the wall-clock tick): advances the
** clock and, unless the user is mid-command, queues "HH:MM:SS\r\n" for TX.
*/
void TIMER0A_Handler(void)
{
char buff[10];
//clear interrupt at GPTM
TIMER0->ICR = TIMER_ICR_TATOCINT;
//clear pending bit in NVIC
NVIC->ICPR[0] = 1UL<<19;
Increase_Clock();//Increase the Clock
if (!busy_input) // if the user is inputting, dont generate Tx data
{
Create_Clock_Display(buff);//generate clock HH:MM:SS
Tx_message(buff, sizeof(buff));//transmit the buffer
}
}
/* ------------- Timer0B_Handler ---------------
** Handles the interrupt from timer0B: periodically polls the RX queue for
** a completed command buffer and dispatches on its first character.
** Commands: 'T'HH:MM set, 'P' pause, 'C' continue, 'F' faster, 'S' slower.
*/
void TIMER0B_Handler(void)
{
// BUG FIX: zero-initialize the buffer. Rx_message leaves it untouched
// when the RX queue is empty, and the old code then switched on an
// uninitialized stack byte (possibly triggering a spurious command).
buffer_t buff = {0};
char c;
//clear interrupt at GPTM
TIMER0->ICR = TIMER_ICR_TBTOCINT;
//clear pending bit in NVIC
NVIC->ICPR[0] = 1UL<<20;
//read buffer from the rx_queue (no-op when the queue is empty)
Rx_message(&buff);
//Compare actions
c = buff[0];
switch (c)
{
case 'T':
set_Clock((char*)buff);
break;
case 'P':
pause_Clock();
break;
case 'C':
continue_Clock();
break;
case 'F':
faster_Clock();
break;
case 'S':
slower_Clock();
break;
}
}
/* ------------- UART Interrupt Handler --------------
** Services both UART0 interrupt sources:
** TX: pops one buffer from tx_queue into the FIFO; when the queue is
**     empty, masks the TX interrupt until Tx_message re-arms it.
** RX (receive timeout): accumulates bytes into a static line buffer; a
**     carriage return (0x0d) marks the command complete, after which the
**     line is pushed onto rx_queue for TIMER0B_Handler to consume. Each
**     received byte is also echoed back to the terminal.
*/
void UART0_Handler(void)
{
char byte;
int i,ret;
static buffer_t rx_temp_buff; // partial command accumulates here across ISRs
static uint8_t buff_count = 0; // bytes accumulated so far
buffer_t *rx_buff;
buffer_t tx_buff;
/* TX ISR */
if ((UART0 -> RIS & UART_RIS_TXRIS) != 0)
{
//Empty buffer from the Tx queue
__set_PRIMASK(1); //Disable all interrupts
ret = release_buffer(&tx_queue,&tx_buff);//get a released buff from tx_queue
__set_PRIMASK(0); //Enable all interrupts
if (ret)
{
for (i=0; i<BUFFER_SIZE; i++)
{
UART0 -> DR = tx_buff[i];
}
}
else //if no more data to send
{
UART0 -> IM &= ~(UART_IM_TXIM) ; //mask TX interrupt at UART
NVIC -> ICPR[0] = 1UL<<5; //clear pending UART interrupt at NVIC
}
}
/* RX ISR*/
if ((UART0 -> RIS & UART_RIS_RTRIS) != 0)
{
UART0 -> ICR |= UART_ICR_RTIC;//clear interrupt
busy_input = 1; //indicating that user is inputting
//transfer data from FIFO to the buffer
do{
byte = (unsigned char)(UART0 -> DR & 0xFF);
rx_temp_buff[buff_count] = byte;
buff_count++;
if (byte == 0x0d) //if byte is carry return
{
// NOTE(review): forcing buff_count to BUFFER_SIZE commits the line but
// leaves any bytes past the CR holding stale data from the previous
// command — confirm downstream parsing tolerates this.
buff_count = BUFFER_SIZE;
busy_input = 0; //user finished inputting
}
}while((UART0 -> FR & UART_FR_RXFE)==0 && buff_count<BUFFER_SIZE);
if (buff_count>=BUFFER_SIZE)// if the temp buffer is full
{
//Fill the Rx queue with the temp buffer
__set_PRIMASK(1); //Disable all interrupts
rx_buff = get_buffer(&rx_queue); //get a buffer from the rx_queue
__set_PRIMASK(0); //Enable all interrupts
//copy the temp_buff to the buff
memcpy(rx_buff, &rx_temp_buff, sizeof(*rx_buff));
//reset the temp_buff
buff_count = 0;
}
//transmit the byte back to the terminal for feedback
tx_buff[0] = byte;
for (i = 1; i<BUFFER_SIZE; i++)
{
tx_buff[i]=0x00;
}
Tx_message(tx_buff, sizeof(tx_buff));
}
}
/* ----------- GPIOF_Handler ---------------
** Handle the push button pressed: on SW1 (PF4) acknowledge the interrupt
** and queue a notification message for the terminal.
**/
void GPIOF_Handler(void)
{
// If Interrupt caused by PF4
if(GPIOF->RIS & (1UL << 4))
{
GPIOF->ICR |= (1UL << 4); // Clear Interrupt
Tx_message("Button Pressed\r\n", sizeof("Button Pressed\r\n"));
}
}
/* -------------- Stop execution -------------
** Fatal-error halt: turn on the PF3 LED, disable all interrupts, and spin
** forever. Never returns.
*/
void stop_execution(void)
{
GPIOF->DATA |= 0x08;//Turn on the LED
__set_PRIMASK(1);//disable all interrupt source
while (1);
}
/* Entry point: bring up UART, NVIC, GPIO, queues and timers, push the
 * initial time string into the UART FIFO, then idle — all work happens in
 * the UART/timer/GPIO interrupt handlers. */
int main(void)
{
// initialize UART
UART_Init();
// initialize NVIC
NVIC_Init();
// Initialize GPIO
GPIO_Init();
// Initialize the tx_queue and rx_queue
queue_Init(&tx_queue);
queue_Init(&rx_queue);
// Initialize the Timer
Timer_Init();
// Start the UART
start_UART();
// Start the timer
start_timer();
while(1);
}
|
d0c385099cf864c2afebb0dd8b00fbdc667fd0d0
|
[
"Markdown",
"C"
] | 4 |
Markdown
|
ducminh296/Embedded-Codes
|
e2d00796316b31414c607f60891f07803e95b92d
|
54b0dba8118ac54b8212fc0601be796afd3ce085
|
refs/heads/master
|
<file_sep>circles
=======
Drawing some circles in a canvas...<file_sep>function Main() {
Main.MAIN_DELAY = 1000 / 60;
Main._Z = 90;
Main._X = 88;
this.mainLoop = null;
this.canvas = document.querySelector('canvas');
{
this.lineWidth = 1;
this.numberOfCircles = 1;
this.scale = 1;
this.radius = 30;
this.clearOnRepaint = true;
this.showCenter = true;
}
(function(instance) {
btStart = document.getElementById('bt-start');
btStart.disabled = false;
btStart.addEventListener('click', function(evt) { this.disabled = true; instance.start(); }, true);
window.addEventListener('keydown', function(evt) { instance.doKeyDown(evt); }, true);
})(this);
}
Main.prototype.refresh = function() {
	// One animation frame: re-read the controls, wipe the canvas, redraw.
	this.checkControl();
	var ctx = this.canvas.getContext('2d');
	ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
	this.draw();
};
Main.prototype.draw = function() {
	// Stroke one circle of radius `this.radius`, centered on the canvas.
	var canvas = this.canvas;
	var context = canvas.getContext('2d');
	var centerX = Math.round(canvas.width / 2);
	var centerY = Math.round(canvas.height / 2);
	context.save();
	context.lineWidth = this.lineWidth;
	context.strokeStyle = 'black';
	context.beginPath();
	context.arc(centerX, centerY, this.radius, 0, 2 * Math.PI, false);
	context.stroke();
	context.restore();
};
Main.prototype.checkControl = function() {
	// Pull the current values of the form controls into instance fields.
	var byId = function(id) { return document.getElementById(id); };
	this.clearOnRepaint = byId('clear-on-repaint').checked;
	this.showCenter = byId('show-center').checked;
	this.scale = byId('zoom-level').value;
	this.lineWidth = byId('line-width').value;
	this.numberOfCircles = byId('number-of-circles').value;
	this.radius = byId('initial-radius').value * this.scale;
};
Main.prototype.start = function() {
	// Begin the repaint loop at Main.MAIN_DELAY milliseconds per frame.
	var self = this;
	this.mainLoop = setInterval(function() { self.refresh(); }, Main.MAIN_DELAY);
};
Main.prototype.stop = function() {
	// Cancel the repaint loop, if one is running.
	if ( this.mainLoop == null ) return;
	clearInterval(this.mainLoop);
	this.mainLoop = null;
};
// Keyboard handler: 'Z' shrinks the zoom factor by 5%, 'X' grows it by 5%.
Main.prototype.doKeyDown = function(evt) {
	var key = evt.keyCode;
	if ( key == Main._Z ) {
		this.scale *= 0.95;
		evt.preventDefault();
	} else if ( key == Main._X ) {
		this.scale *= 1.05;
		evt.preventDefault();
	}
}<file_sep>(function() {
	// Bootstrap: build the Main instance on page load and auto-click "start".
	var main = null;
	function init() {
		main = new Main();
		document.getElementById('bt-start').click();
	}
	window.addEventListener('load', init, false);
})();
|
948fd49fc009d99d644928e341795bf0b2b51ebe
|
[
"Markdown",
"JavaScript"
] | 3 |
Markdown
|
crisstanza/circles
|
41cbaf8617d655936a4437aec11c27abedde2f4f
|
8f312541790a8f0f13e32cd53d0341a7e853ff16
|
refs/heads/master
|
<file_sep><Query Kind="Statements">
<Connection>
<ID>1c2e1a44-f376-4dd1-886c-4316c0d6261a</ID>
<Persist>true</Persist>
<Server>.\SQLEXPRESS</Server>
<Database>Chinook</Database>
</Connection>
</Query>
// Tracks-per-genre summary for the Chinook sample database.
// Bug fix: `var` was truncated to `ar`, and `where g.Name` was not a boolean
// expression - neither compiles. Filter out null genre names instead.
var results = from g in Genres
			  where g.Name != null
			  select new
			  {
				  Genre = g.Name,
				  TrackCount = g.Tracks.Count()
			  };
results.Dump("Query Math 1");
Practice for LINQ
Name: Adley
Phone:18845137152
Email:<EMAIL>
|
96df65a5b061abfd350509e1ff763cc0ec045120
|
[
"Markdown",
"C#"
] | 2 |
C#
|
yjiang25/LINQ_practice
|
2c353d13886412fffcf05f33f15c108dddd48fc0
|
a05966d67e62c979f79596db03829ec672306d66
|
refs/heads/main
|
<file_sep>#pragma once
#include <QImage>
#include <iostream>
#include <cstdlib>
/**
* Функция для приведения значений к допустимому диапазону значений между max и min
*/
template <class T>
T clamp(T value, T max, T min);
// Base class for per-pixel image filters.
class Filter
{
protected:
	// compute the new color of one pixel (const ref to the image plus pixel coordinates)
	virtual QColor calcNewPixelColor(const QImage& img, int x, int y) const = 0;// pure virtual
public:
	virtual ~Filter() = default;// defaulted virtual destructor
	virtual QImage process(const QImage& img) const;// apply the filter to the whole image
};
// Color-inversion filter
class InvertFilter :public Filter
{
	// override the per-pixel color computation
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
// Square convolution kernel: a (2*radius+1)^2 grid of float weights.
class Kernel
{
protected:
	// kernel weights
	std::unique_ptr<float[]> data;// flat array of floats
	// kernel radius
	std::size_t radius;
	// total number of weights in the kernel
	std::size_t getLen() const
	{
		return getSize() * getSize();
	}
public:
	// construct an empty (zero-filled) kernel of the given radius
	Kernel(std::size_t radius) :radius(radius)
	{
		// allocate getLen() floats
		data = std::make_unique<float[]>(getLen());
	}
	// copy constructor
	Kernel(const Kernel& other) : Kernel(other.radius)
	{
		std::copy(other.data.get(), other.data.get() + getLen(), data.get());
	}
	// accessors
	std::size_t getRadius() const// radius
	{
		return radius;
	}
	std::size_t getSize() const// side length of the square kernel
	{
		return radius * 2 + 1;
	}
	float operator[](std::size_t id) const// read-only indexing
	{
		return data[id];
	}
	float& operator[](std::size_t id)// mutable indexing
	{
		return data[id];
	}
};
// Filter that convolves the image with a Kernel.
class MatrixFilter : public Filter
{
protected:
	Kernel mKernel;
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
public:
	MatrixFilter(const Kernel& kernel) : mKernel(kernel) // the kernel is supplied by the caller
	{
	}
	virtual ~MatrixFilter() = default;
};
/* Box-blur filter */
// kernel: uniform weights summing to 1
class BlurKernel : public Kernel
{
public:
	using Kernel::Kernel;// inherit the base-class constructors
	// forwards the radius to the Kernel constructor
	BlurKernel(std::size_t radius = 2) : Kernel(radius)
	{
		for (std::size_t i = 0; i < getLen(); i++)// fill with equal weights
		{
			data[i] = 1.0f / getLen();// each weight depends only on the kernel size
		}
	}
};
// box-blur filter class
class BlurFilter : public MatrixFilter
{
public:
	BlurFilter(std::size_t radius = 1) : MatrixFilter(BlurKernel(radius)) {}
};
// Gaussian smoothing kernel, normalized to sum to 1.
class GaussianKernel : public Kernel
{
public:
	// inherit base constructors
	using Kernel::Kernel;
	GaussianKernel(std::size_t radius = 2, float sigma = 3.f) :Kernel(radius)
	{
		// normalization factor for the kernel
		float norm = 0;
		int signed_radius = static_cast<int>(radius);
		// fill the kernel with unnormalized Gaussian weights
		// NOTE(review): the classic Gaussian uses exp(-(x*x+y*y)/(2*sigma*sigma));
		// this code omits the factor 2 - confirm whether that is intentional.
		for (int x = -signed_radius; x <= signed_radius; x++)
		{
			for (int y = -signed_radius; y <= signed_radius; y++)
			{
				std::size_t idx = (x + radius) * getSize() + (y + radius);
				data[idx] = std::exp(-(x * x + y * y) / (sigma * sigma));
				norm += data[idx];
				std::cout <<data[idx]<<" ";// debug print of the raw weights
			}
			std::cout << "\n";
		}
		// normalize the kernel so the weights sum to 1
		std::cout << "Norm:\n";
		for (std::size_t i = 0; i <getLen(); i++)
		{
			data[i] /= norm;
			std::cout << data[i] << " ";// debug print of the normalized weights
		}
	}
};
// Gaussian smoothing filter
class GaussianFilter :public MatrixFilter
{
public:
	GaussianFilter(std::size_t radius = 1) : MatrixFilter(GaussianKernel(radius)) {}
};
/* Exercise filters implemented below */
/* Point (per-pixel) filters */
/* GrayScale */
class GrayScaleFilter :public Filter
{
	// override the per-pixel color computation
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/* Sepia toning */
class SepiaFilter :public Filter
{
	// override the per-pixel color computation
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/* Brightness increase */
class IntensityUP : public Filter
{
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/* Sobel operator for the brightness gradient */
// vertical (y) Sobel kernel; the horizontal one is obtained by transposed indexing
class YSobelKernel : public Kernel
{
public:
	// inherit base constructors
	using Kernel::Kernel;
	YSobelKernel(std::size_t radius = 1) :Kernel(radius)
	{
		data[0] = -1; data[1] = -2; data[2] = -1;
		data[3] = 0; data[4] = 0; data[5] = 0;
		data[6] = 1; data[7] = 2; data[8] = 1;
	}
};
// Sobel edge-magnitude filter
class SobelFilter :public MatrixFilter
{
protected:
public:
	SobelFilter(std::size_t radius = 1) : MatrixFilter(YSobelKernel(radius)) {}
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/* Contrast / sharpness enhancement */
// kernel
class SharpnessUPKernel : public Kernel
{
public:
	// inherit base constructors
	using Kernel::Kernel;
	SharpnessUPKernel(std::size_t radius = 1) :Kernel(radius)
	{
		data[0] = 0; data[1] = -1; data[2] = 0;
		data[3] = -1; data[4] = 5; data[5] = -1;
		data[6] = 0; data[7] = -1; data[8] = 0;
	}
};
// sharpening filter
class SharpnessUPFilter :public MatrixFilter
{
public:
	SharpnessUPFilter(std::size_t radius = 1) : MatrixFilter(SharpnessUPKernel(radius)) {}
};
/* Grey-world white balance */
class GreyWorldFilter :public Filter
{
	// per-channel mean intensities and their average, filled by mediumIntensities()
	float med_R, med_G, med_B, avg;
public:
	GreyWorldFilter()
	{
		med_R = 1, med_G = 1, med_B = 1, avg = 1;
	}
	// compute the channel means over the whole image
	void mediumIntensities(const QImage& img);
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
	QImage process(const QImage& img);
};
/* Linear histogram stretch */
class HistugrammFilter : public Filter
{
	// luma range of the source image, filled by intensities_range_calc()
	float intensity_max, intensity_min;
public:
	HistugrammFilter()
	{
		intensity_max = 1, intensity_min=1;
	}
	void intensities_range_calc(const QImage& img);
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
	QImage process(const QImage& img);
};
/*Фильтры индексации пикселей*/
//
//class ShiftFilter : public Filter
//{
//protected:
// struct
// {
// int k;
// int l;
// } typedef KL;
//public:
// QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
// QImage process(const QImage& img) const override;
// KL calcFromWhereTakeColor(int x,int y) const;
//};
// Base class for filters that relocate pixels instead of recoloring them.
class LocationFilter
{
protected:
	struct
	{
		int k;	// target x coordinate
		int l;	// target y coordinate
	} typedef KL;
public:
	// map source coordinates (x, y) to target coordinates (k, l)
	virtual KL calcNewPixelLocation(int x, int y) const =0 ;
	virtual QImage process(const QImage& img) const;
	virtual ~LocationFilter() = default;
};
/* Translation / shift */
class ShiftFilter : public LocationFilter
{
public:
	KL calcNewPixelLocation(int x, int y) const override;
};
/* "Frosted glass" effect */
class GlassFilter : public LocationFilter
{
public:
	QImage process(const QImage& img) const;
	KL calcNewPixelLocation(int x, int y) const;
};
/* Motion-blur filter */
// kernel: equal weights on the main diagonal, zero elsewhere
class MotionBlurKernel : public Kernel
{
public:
	using Kernel::Kernel;
	MotionBlurKernel(std::size_t radius = 1) :Kernel(radius)
	{
		int signed_radius = static_cast<int>(radius);
		// build the kernel: diagonal cells share weight 1/size, the rest are zero
		for (int x = -signed_radius; x <= signed_radius; x++)
		{
			for (int y = -signed_radius; y <= signed_radius; y++)
			{
				std::size_t idx = (x + radius) * getSize() + (y + radius);
				if (x == y)
				{
					data[idx] = (float)1 / getSize();
				}
				else
				{
					data[idx] = 0;
				}
				std::cout << data[idx]<<" ";// debug print of the kernel
			}
			std::cout << "\n";
		}
	}
};
// motion-blur filter
class MotionBlurFilter : public MatrixFilter
{
public:
	MotionBlurFilter(std::size_t radius = 1) : MatrixFilter(MotionBlurKernel(radius)) {}
};
/* Strong sharpening */
class UltraSharpnessKernel : public Kernel
{
public:
	using Kernel::Kernel;
	UltraSharpnessKernel(std::size_t radius = 1) :Kernel(radius)
	{
		data[0] = -1; data[1] = -1; data[2] = -1;
		data[3] = -1; data[4] = 9; data[5] = -1;
		data[6] = -1; data[7] = -1; data[8] = -1;
	}
};
// strong-sharpening filter
class UltraSharpnessFilter : public MatrixFilter
{
public:
	UltraSharpnessFilter(std::size_t radius = 1) : MatrixFilter(UltraSharpnessKernel(radius)) {}
};
/* Mathematical morphology */
// Black-and-white thresholding
class BlackWhiteFilter : public Filter
{
public :
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/*Dilation*/
// structuring element: an all-ones square
class DilationKernel : public Kernel
{
public:
	using Kernel::Kernel;
	// forwards the radius to the Kernel constructor
	DilationKernel(std::size_t radius = 1) : Kernel(radius)
	{
		for (size_t i = 0; i < getLen(); i++)
		{
			data[i] = 1;
		}
	/*	data[0] = 1; data[1] = 1; data[2] = 1;
		data[3] = 1; data[4] = 1; data[5] = 1;
		data[6] = 1; data[7] = 1; data[8] = 1;*/
	}
};
// Morphological dilation filter (see Filters.cpp for the per-pixel rule)
class DilationFilter : public MatrixFilter
{
public:
	DilationFilter(std::size_t radius = 1) : MatrixFilter(DilationKernel(radius)) {}
	QImage process(const QImage& img) const override;
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/*Erosion*/
// structuring element: an all-ones square
class ErosionKernel : public Kernel
{
public:
	using Kernel::Kernel;
	// forwards the radius to the Kernel constructor
	ErosionKernel(std::size_t radius = 1) : Kernel(radius)
	{
		for (size_t i = 0; i < getLen(); i++)
		{
			data[i] = 1;
		}
	/*	data[0] = 1; data[1] = 1; data[2] = 1;
		data[3] = 1; data[4] = 1; data[5] = 1;
		data[6] = 1; data[7] = 1; data[8] = 1;*/
	}
};
// Morphological erosion filter (see Filters.cpp for the per-pixel rule)
class ErosionFilter : public MatrixFilter
{
public:
	ErosionFilter(std::size_t radius = 1) : MatrixFilter(ErosionKernel(radius)) {}
	QImage process(const QImage& img) const override;
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/*Median Filter*/
// all-ones mask: every neighbor participates in the median
class MedianKernel : public Kernel
{
public:
	using Kernel::Kernel;
	MedianKernel(std::size_t radius = 1) : Kernel(radius)
	{
		int signed_radius = static_cast<int>(radius);
		for (int i = -signed_radius; i <= signed_radius; i++)
		{
			for (int j = -signed_radius; j <= signed_radius; j++)
			{
				std::size_t idx = (i + radius) * getSize() + (j + radius);
				data[idx] = 1;
			}
		}
	}
};
// Median noise-reduction filter
class MedianFilter : public MatrixFilter
{
public:
	MedianFilter(std::size_t radius = 2) : MatrixFilter(MedianKernel(radius)) {}
	QColor calcNewPixelColor(const QImage& img, int x, int y) const override;
};
/*Opening*/
/*erosion -> dilation*/
class OpeningFilter
{
public:
	QImage process(const QImage& img) const
	{
		DilationFilter dilation(2);
		ErosionFilter erosion(2);
		QImage result;
		QImage temp;
		temp = erosion.process(img);// erode first...
		result = dilation.process(temp);// ...then dilate
		return result;
	}
};
/*Closing*/
/*dilation -> erosion*/
class ClosingFilter
{
public:
	QImage process(const QImage& img) const
	{
		DilationFilter dilation(2);
		ErosionFilter erosion(2);
		QImage result;
		QImage temp;
		temp = dilation.process(img);// dilate first...
		result = erosion.process(temp);// ...then erode
		return result;
	}
};
/*Top hat*/
// Top-hat morphological transform (implementation in Filters.cpp)
class TopHatFilter
{
public:
	QImage process(const QImage& img) const;
};<file_sep>#include <QtCore/QCoreApplication>
#include <QImage>
#include <iostream>
#include <cstdlib>
#include "Filters.h"
using namespace std;
// Entry point: reads the image path from the "-p <path>" command-line
// switch, loads it and runs the currently enabled filter(s), saving the
// results into the Images folder. Disabled experiments are kept as comments.
int main(int argc, char* argv[])
{
	srand(1);	// fixed seed: makes the "glass" jitter reproducible
	string str;
	QImage img;	// QImage provides convenient per-pixel color access
	for (int i = 0; i < argc; i++)	// find the image path following "-p"
	{
		if (!strcmp(argv[i], "-p") && (i + 1 < argc))
		{
			str = argv[i + 1];
		}
	}
	img.load(QString(str.c_str()));	// std::string -> C string -> QString, then load
	// (QString stores a Unicode character string.)
	// img.save("Images\\Source1.png");
	//InvertFilter invert;
	// invert.process(img).save(QString("Images\\Invert1.png"));
	// BlurFilter blur;
	// blur.process(img).save(QString("Images\\Blur2.png"));
	/* GaussianFilter gauss;
	gauss.process(img).save(QString("Images\\Gauss1.png"));*/
	/* GrayScaleFilter gray_scale;
	gray_scale.process(img).save(QString("Images\\GreyScale1.png"));*/
	/* SepiaFilter sepia;
	sepia.process(img).save(QString("Images\\Sepia1.png"));*/
	//IntensityUP intensity_up;
	// intensity_up.process(img).save(QString("Images\\IntensityUP1.png"));
	// Active pipeline: Sobel edge detection followed by grayscale conversion.
	SobelFilter sobel;
	GrayScaleFilter gray_scale;
	QImage sbl;
	sbl =sobel.process(img);
	gray_scale.process(sbl).save(QString("Images\\Sobel1_grey.png"));
	//SharpnessUPFilter sharpness;;
	// sharpness.process(img).save(QString("Images\\Sharpness1.png"));
	/* GreyWorldFilter grey_world;
	grey_world.process(img).save(QString("Images\\GreyWorld1.png"));*/
	//HistugrammFilter hist;
	//hist.process(img).save(QString("Images\\Hist5.png"));
	/* ShiftFilter shift;
	shift.process(img).save(QString("Images\\Shift1.png"));*/
	//GlassFilter glass;
	//glass.process(img).save(QString("Images\\Glass1.png"));
	//MotionBlurFilter motion_blur(4);
	//motion_blur.process(img).save(QString("Images\\MotionBlur1.png"));
	/*UltraSharpnessFilter ultra_sharpness;
	ultra_sharpness.process(img).save(QString("Images\\`UltraSharpness1.png"));*/
	/* BlackWhiteFilter bw;
	bw.process(img).save(QString("Images\\BlackWhiteFilter.png"));*/
	/* DilationFilter dilation(3);
	dilation.process(img).save(QString("Images\\Dilation6.png"));*/
	//ErosionFilter erosion(2);
	//erosion.process(img).save(QString("Images\\Erosion6.png"));
	/* MedianFilter median(5);
	median.process(img).save(QString("Images\\Median1.png")); */
	/* OpeningFilter opening;
	opening.process(img).save(QString("Images\\Opening6_inv_noi.png")); */
	/*ClosingFilter closing;
	closing.process(img).save(QString("Images\\Opening6_noi.png"));*/
	//TopHatFilter top_hat;
	//top_hat.process(img).save(QString("Images\\TopHat7.png"));
	return 0;
}
<file_sep>#include "Filters.h"
<file_sep>#include "Filters.h"
#include<math.h>
template <class T>
T clamp(T value, T max, T min)
{
	// Clamp `value` into [min, max]: out-of-range values snap to the nearer bound,
	// in-range values pass through unchanged.
	if (value < min)
	{
		return min;
	}
	return (value > max) ? max : value;
}
// Whole-image processing shared by every per-pixel filter.
QImage Filter::process(const QImage& img) const
{
	QImage result(img);// result image, starts as a copy of the source
	// visit every pixel
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			// compute the new color for the pixel (x, y)
			QColor color = calcNewPixelColor(img, x, y);
			// store it in the result image
			result.setPixelColor(x, y, color);
		}
	}
	// return the processed image
	return result;
}
// Per-pixel color for the inversion filter.
QColor InvertFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	// invert each channel
	color.setRgb(255 - color.red(), 255 - color.green(), 255 - color.blue());
	return color;// return the inverted color
}
// Convolution of the kernel with the neighborhood of pixel (x, y).
QColor MatrixFilter::calcNewPixelColor(const QImage& img, int x, int y) const // x, y - coordinates of the current pixel
{
	// channel accumulators
	float returnR = 0;
	float returnG = 0;
	float returnB = 0;
	int size = mKernel.getSize();// kernel side length
	int radius = mKernel.getRadius();// kernel radius
	// i and j run from -radius to radius and address a kernel cell relative
	// to the kernel center: i = j = 0 is the center, i = j = -radius the
	// top-left corner.
	for (int i = -radius; i <= radius; i++)// walk the kernel and accumulate r, g, b
	{
		for (int j = -radius; j <= radius; j++)
		{
			// idx is the flat index of the kernel cell, re-based so counting
			// starts at the top-left corner (the center maps to 4 for radius 1)
			int idx = (i + radius) * size + j + radius;
			// neighbor color; coordinates are clamped to stay inside the image,
			// width and height handled separately
			QColor color = img.pixelColor(clamp<float>(x + j, img.width() - 1, 0), clamp<float>(y + i, img.height() - 1, 0));
			// accumulate each channel
			// weighted by the kernel coefficient
			// for r, g and b alike
			returnR += color.red() * mKernel[idx];
			returnG += color.green() * mKernel[idx];
			returnB += color.blue() * mKernel[idx];
		}
	}
	// clamp the accumulated channels into [0, 255] and return
	return QColor(clamp<float>(returnR, 255.f, 0.f), clamp<float>(returnG, 255.f, 0.f), clamp<float>(returnB, 255.f, 0.f));
}
// Per-pixel grayscale conversion using the ITU-R BT.601 luma weights.
QColor GrayScaleFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	// Bug fix: the blue weight was 0.144 (weights summed to 1.03, slightly
	// over-brightening the result); BT.601 luma is 0.299 R + 0.587 G + 0.114 B.
	float intensity = 0.299 * color.red() + 0.587 * color.green() + 0.114 * color.blue();
	// write the same intensity into all three channels
	color.setRgb(clamp<float>(intensity, 255.f, 0.f), clamp<float>(intensity, 255.f, 0.f), clamp<float>(intensity, 255.f, 0.f));
	return color;
}
// Per-pixel sepia toning: shift the BT.601 luma toward warm tones.
QColor SepiaFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	// Bug fix: the blue luma weight was 0.144; BT.601 uses 0.114.
	float intensity = 0.299 * color.red() + 0.587 * color.green() + 0.114 * color.blue();
	float k = 58;	// tone strength
	// boost red, slightly boost green, reduce blue
	color.setRgb(clamp<float>(intensity + 2 * k, 255.f, 0.f), clamp<float>(intensity + 0.5 * k, 255.f, 0.f), clamp<float>(intensity - 1 * k, 255.f, 0.f));
	return color;
}
// Brightness boost: add a fixed constant to every channel.
QColor IntensityUP::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	float constanta = 58;	// brightness increment
	// add the constant to each channel, clamped to [0, 255]
	color.setRgb(clamp<float>(color.red()+ constanta, 255.f, 0.f), clamp<float>(color.green()+ constanta, 255.f, 0.f), clamp<float>(color.blue() + constanta, 255.f, 0.f));
	return color;
}
// Sobel gradient magnitude at pixel (x, y): convolve with the vertical
// kernel and its transpose (horizontal), then combine per channel.
QColor SobelFilter::calcNewPixelColor(const QImage& img, int x, int y) const // x, y - coordinates of the current pixel
{
	// final channel values
	float returnR = 0;
	float returnG = 0;
	float returnB = 0;
	float GyR=0, GyG=0, GyB=0;	// vertical-gradient accumulators
	float GxR=0, GxG=0, GxB=0;	// horizontal-gradient accumulators
	int size = mKernel.getSize();// kernel side length
	int radius = mKernel.getRadius();// kernel radius
	for (int i = -radius; i <= radius; i++)// walk the kernel
	{
		for (int j = -radius; j <= radius; j++)
		{
			// idxY addresses the kernel cell directly; idxX swaps i and j,
			// which transposes the kernel and yields the horizontal operator
			int idxY = (i + radius) * size + j + radius;
			int idxX = (j + radius) * size + i + radius;
			// neighbor color; coordinates are clamped to stay inside the image,
			// width and height handled separately
			QColor color = img.pixelColor(clamp<float>(x + j, img.width() - 1, 0), clamp<float>(y + i, img.height() - 1, 0));
			// vertical transitions
			GyR += color.red() * mKernel[idxY];
			GyG += color.green() * mKernel[idxY];
			GyB += color.blue() * mKernel[idxY];
			// horizontal transitions
			GxR += color.red() * mKernel[idxX];
			GxG += color.green() * mKernel[idxX];
			GxB += color.blue() * mKernel[idxX];
		}
	}
	// gradient magnitude per channel
	returnR = std::sqrt(GyR * GyR + GxR * GxR);
	returnG = std::sqrt(GyG * GyG + GxG * GxG);
	returnB = std::sqrt(GyB * GyB + GxB * GxB);
	// clamp and return
	return QColor(clamp<float>(returnR, 255.f, 0.f), clamp<float>(returnG, 255.f, 0.f), clamp<float>(returnB, 255.f, 0.f));
}
// Compute the mean R, G, B over the whole image and their average,
// storing them in med_R / med_G / med_B / avg for calcNewPixelColor.
void GreyWorldFilter::mediumIntensities(const QImage& img)
{
	int size = img.width()*img.height();// pixel count
	float accum_R = 0;
	float accum_G = 0;
	float accum_B = 0;
	// NOTE(review): float accumulators lose precision on very large images -
	// consider double if exactness matters.
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			QColor color=img.pixelColor(clamp<float>(x, img.width() - 1, 0), clamp<float>(y, img.height() - 1, 0));
			accum_R += color.red();
			accum_G += color.green();
			accum_B += color.blue();
		}
	}
	med_R = accum_R / size;
	med_G = accum_G / size;
	med_B = accum_B / size;
	avg = (med_R + med_G + med_B) / 3;
	//return QColor(clamp<float>(med_R, 255.f, 0.f), clamp<float>(med_G, 255.f, 0.f), clamp<float>(med_B, 255.f, 0.f));
}
// Rescale each channel so the image's channel means converge to their average.
QColor GreyWorldFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	// scale each channel by avg / channel-mean, clamped to [0, 255]
	color.setRgb(clamp<float>(color.red()*avg/med_R, 255.f, 0.f), clamp<float>(color.green() * avg / med_G, 255.f, 0.f), clamp<float>(color.blue() * avg / med_B, 255.f, 0.f));
	return color;
}
// Whole-image grey-world pass: first gather the channel means, then rescale.
QImage GreyWorldFilter::process(const QImage& img)
{
	mediumIntensities(img);	// must run before the per-pixel scaling
	QImage result(img);// result image, starts as a copy of the source
	// visit every pixel
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			// compute the new color for the pixel (x, y)
			QColor color = calcNewPixelColor(img, x, y);
			// store it in the result image
			result.setPixelColor(x, y, color);
		}
	}
	// return the processed image
	return result;
}
// Whole-image histogram stretch: first measure the luma range, then rescale.
QImage HistugrammFilter::process(const QImage& img)
{
	intensities_range_calc(img);	// must run before the per-pixel stretch
	QImage result(img);// result image, starts as a copy of the source
	// visit every pixel
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			// compute the new color for the pixel (x, y)
			QColor color = calcNewPixelColor(img, x, y);
			// store it in the result image
			result.setPixelColor(x, y, color);
		}
	}
	// return the processed image
	return result;
}
// Linearly stretch the pixel's luma from [intensity_min, intensity_max] to [0, 255].
QColor HistugrammFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	float intensity = 0, intensity_tmp = 0;
	// Bug fix: the blue luma weight was 0.144; BT.601 uses 0.114.
	intensity_tmp = 0.299 * color.red() + 0.587 * color.green() + 0.114 * color.blue();
	if (intensity_max > intensity_min)	// guard against division by zero on flat images
		intensity = (intensity_tmp - intensity_min) * (255 - 0) / (intensity_max - intensity_min);
	else
		intensity = intensity_tmp;
	color.setRgb(clamp<float>(intensity, 255.f, 0.f), clamp<float>(intensity, 255.f, 0.f), clamp<float>(intensity, 255.f, 0.f));
	return color;
}
// Scan the image and record the minimum and maximum luma in
// intensity_min / intensity_max for the subsequent stretch.
void HistugrammFilter::intensities_range_calc(const QImage& img)
{
	// Bug fix: min_int used to start at 0, so the reported minimum was always
	// 0 and the stretch never lifted dark images. Start the minimum at the
	// largest possible luma instead.
	float min_int = 255.f, max_int = 0;
	float tmp_intens;
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			QColor color = img.pixelColor(clamp<float>(x, img.width() - 1, 0), clamp<float>(y, img.height() - 1, 0));
			// Bug fix: the blue luma weight was 0.144; BT.601 uses 0.114.
			tmp_intens = 0.299 * color.red() + 0.587 * color.green() + 0.114 * color.blue();
			if (tmp_intens > max_int)
			{
				max_int = tmp_intens;
			}
			if (tmp_intens < min_int)
			{
				min_int = tmp_intens;
			}
		}
	}
	intensity_max = max_int;
	intensity_min = min_int;
}
// Forward-map every source pixel to its new location. Coordinates that fall
// outside the image are wrapped around and painted black.
QImage LocationFilter::process(const QImage& img) const
{
	QImage result(img);// starts as a copy, so unmapped pixels keep their source color
	// visit every pixel
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			QColor color = img.pixelColor(x, y);// color of the source pixel (x, y)
			KL result_kl = calcNewPixelLocation(x, y);// its target position (k, l)
			QColor black_color;
			black_color.setRgb(0, 0, 0);
			// beyond the horizontal bounds: wrap and paint black
			if (result_kl.k >= img.width())
			{
				result_kl.k = result_kl.k % img.width();
				color = black_color;
			}
			if (result_kl.k < 0)
			{
				result_kl.k = img.width() + result_kl.k;//result_kl.k<0
				color = black_color;
			}
			// beyond the vertical bounds: wrap and paint black
			if (result_kl.l >= img.height())
			{
				result_kl.l = result_kl.l % img.height();
				color = black_color;
			}
			if (result_kl.l < 0)
			{
				result_kl.l = img.height() + result_kl.l;//result_kl.l<0
				color = black_color;
			}
			// write the source color at the computed location
			result.setPixelColor(result_kl.k, result_kl.l, color);
		}
	}
	// return the processed image
	return result;
}
///*
// 𝑥(𝑘, 𝑙) = 𝑘 + 50;
// 𝑦(𝑘, 𝑙) = 𝑙;
//*/
// Shift each pixel 50 px left and 50 px down.
// NOTE(review): the spec comment above says x(k,l) = k + 50, y(k,l) = l,
// but the code computes x - 50 and y + 50 - confirm which is intended.
LocationFilter::KL ShiftFilter::calcNewPixelLocation(int x, int y) const
{
	KL result;
	result.k = x - 50;
	result.l = y + 50;
	return result;
}
// Frosted-glass pass: each pixel samples its color from a nearby,
// randomly jittered source position (inverse mapping, no holes).
QImage GlassFilter::process(const QImage& img) const
{
	QImage result(img);// result image, starts as a copy of the source
	// visit every pixel
	for (int x = 0; x < img.width(); x++)
	{
		for (int y = 0; y < img.height(); y++)
		{
			//QColor color = img.pixelColor(x, y);
			KL result_kl = calcNewPixelLocation(x, y);// where to sample the color from
			// if the sample point lies outside the image, keep the pixel's own position
			if (result_kl.k >= img.width())
			{
				result_kl.k = x;
			}
			if (result_kl.k <= 0)
			{
				result_kl.k =x;//result_kl.k<0
			}
			// vertical bounds
			if (result_kl.l >= img.height())
			{
				result_kl.l =y;
			}
			if (result_kl.l <= 0)
			{
				result_kl.l =y;//result_kl.l<0
			}
			// sample the color from the computed source position
			QColor color= img.pixelColor(result_kl.k, result_kl.l);
			// and write it into the current pixel
			result.setPixelColor(x, y, color);
		}
	}
	// return the processed image
	return result;
}
/*
𝑥(𝑘, 𝑙) = 𝑘 + (𝑟𝑎𝑛𝑑(1) – 0.5) ∗ 10;
𝑦(𝑘, 𝑙) = 𝑙 + (𝑟𝑎𝑛𝑑(1) – 0.5) ∗ 10;
*/
// Jitter the sampling position by up to roughly +-5 px on each axis:
// k = x - (rand01 - 0.5) * 10, likewise for l.
LocationFilter::KL GlassFilter::calcNewPixelLocation(int x, int y) const
{
	KL result;
	result.k = x- (float(rand()%101)/100-0.5)*10;
	result.l = y -(float(rand()%101)/100-0.5)*10;
	return result;
}
// Threshold the pixel to pure black or pure white by its BT.601 luma.
QColor BlackWhiteFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	// Bug fix: the blue luma weight was 0.144 in both formulas below;
	// BT.601 uses 0.114 (the unused intensity_min local was also removed).
	float intensity_max;
	// maximum possible luma
	intensity_max = 0.299 * 255 + 0.587 * 255 + 0.114 * 255;
	// read the current pixel color
	QColor color = img.pixelColor(x, y);
	QColor black_color;
	black_color.setRgb(0, 0, 0);
	QColor white_color;
	white_color.setRgb(255, 255, 255);
	// pixel luma
	float intensity = 0.299 * color.red() + 0.587 * color.green() + 0.114 * color.blue();
	// threshold at intensity_max / 2.5
	if (intensity> intensity_max/(2.5f))
	{
		color = white_color;
	}
	else
	{
		color = black_color;
	}
	// channels are already 0 or 255; clamp kept for safety
	color.setRgb(clamp<float>(color.red(), 255.f, 0.f), clamp<float>(color.green(), 255.f, 0.f), clamp<float>(color.blue(), 255.f, 0.f));
	return color;
}
// Apply dilation over the interior of the image, leaving a border of
// kernel-radius pixels untouched (they keep their source color).
QImage DilationFilter::process(const QImage& img) const
{
	QImage result(img);// result image, starts as a copy of the source
	int mask_width=1, mask_height = 1;	// unused; kept for compatibility
	int mask_radius = 0;
	mask_radius = mKernel.getRadius();
	//img.convertToFormat(QImage::Format_Mono);
	for (int x = mask_radius; x < img.width()- mask_radius ; x++)
	{
		for (int y = mask_radius; y < img.height()- mask_radius; y++)
		{
			// compute the dilated color for the pixel (x, y)
			QColor color = calcNewPixelColor(img, x, y);
			// store it in the result image
			result.setPixelColor(x, y, color);
		}
	}
	return result;
}
// Dilation at (x, y): the pixel becomes white as soon as any neighbor
// selected by the structuring element is brighter than the running maximum
// in all three channels; otherwise it stays black.
QColor DilationFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	QColor result;
	int max=0;
	int size = mKernel.getSize();// kernel side length
	int radius = mKernel.getRadius();// kernel radius
	// Bug fix: the loops used `< radius` and skipped the last row and column
	// of the structuring element; a kernel spans [-radius, radius].
	// (The unused tmp_intensity local was also removed.)
	for (int i = -radius; i <= radius; i++)
	{
		for (int j = -radius; j <= radius; j++)
		{
			QColor tmp_color = img.pixelColor(x+i, y+j);
			int idx = (i + radius) * size + j + radius;
			if ((mKernel[idx]) && (tmp_color.red() > max)&& (tmp_color.green() > max)&& (tmp_color.blue() > max))
			{
				max = 255;
			}
		}
	}
	result.setRgb(clamp<float>(max, 255.f, 0.f), clamp<float>(max, 255.f, 0.f), clamp<float>(max, 255.f, 0.f));
	return result;
}
/**/
// Apply erosion over the interior of the image, leaving a border of
// kernel-radius pixels untouched (they keep their source color).
QImage ErosionFilter::process(const QImage& img) const
{
	QImage result(img);// result image, starts as a copy of the source
	int mask_width = 1, mask_height = 1;	// unused; kept for compatibility
	int mask_radius = 0;
	mask_radius = mKernel.getRadius();
	//img.convertToFormat(QImage::Format_Mono);
	for (int x = mask_radius; x < img.width() - mask_radius; x++)
	{
		for (int y = mask_radius; y < img.height() - mask_radius; y++)
		{
			// compute the eroded color for the pixel (x, y)
			QColor color = calcNewPixelColor(img, x, y);
			// store it in the result image
			result.setPixelColor(x, y, color);
		}
	}
	return result;
}
// Erosion at (x, y): the pixel becomes black as soon as any neighbor
// selected by the structuring element is darker than the running minimum
// in all three channels; otherwise it stays white.
QColor ErosionFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
	QColor result;
	int min = 255;
	int size = mKernel.getSize();// kernel side length
	int radius = mKernel.getRadius();// kernel radius
	// Bug fix: the loops used `< radius` and skipped the last row and column
	// of the structuring element; a kernel spans [-radius, radius].
	// (The unused tmp_intensity local was also removed.)
	for (int i = -radius; i <= radius; i++)
	{
		for (int j = -radius; j <= radius; j++)
		{
			QColor tmp_color = img.pixelColor(x + i, y + j);
			int idx = (i + radius) * size + j + radius;
			if ((mKernel[idx]) && (tmp_color.red() < min) && (tmp_color.green() < min) && (tmp_color.blue() < min))
			{
				min = 0;
			}
		}
	}
	result.setRgb(clamp<float>(min, 255.f, 0.f), clamp<float>(min, 255.f, 0.f), clamp<float>(min, 255.f, 0.f));
	return result;
}
/*Median FIlter*/
// In-place ascending bubble sort of the first `Ind` elements of `color`.
// Bug fix: the swap temporary was declared as `int`, which silently
// truncated the fractional part of every float value it passed through.
void sortcolor(float color[], int Ind)
{
    for (int i = 0; i < Ind; i++)
        for (int j = (Ind - 1); j >= (i + 1); j--)
            if (color[j] < color[j - 1])
            {
                float tmp = color[j];
                color[j] = color[j - 1];
                color[j - 1] = tmp;
            }
};
// Returns the middle element of `color`; the caller is expected to have
// sorted the first `Ind` elements beforehand (see sortcolor above).
float median(float color[], int Ind)
{
    return color[Ind / 2];
};
// Median filter: collects the kernel-weighted channel values of the
// neighbourhood around (x, y), sorts each channel independently and returns
// the per-channel medians as the new pixel colour.
QColor MedianFilter::calcNewPixelColor(const QImage& img, int x, int y) const
{
    int size = mKernel.getSize();     // kernel side length (2 * radius + 1)
    int radius = mKernel.getRadius(); // kernel radius
    int count = size * size;
    float* colorR = new float[count];
    float* colorG = new float[count];
    float* colorB = new float[count];
    for (int i = -radius; i <= radius; i++)
    {
        for (int j = -radius; j <= radius; j++)
        {
            int idx = (i + radius) * size + j + radius;
            // Clamp so neighbourhoods at the image border stay in bounds.
            QColor color = img.pixelColor(clamp(x + j, img.width() - 1, 0), clamp(y + i, img.height() - 1, 0));
            colorR[idx] = float(color.red()) * mKernel[idx];
            colorG[idx] = float(color.green()) * mKernel[idx];
            colorB[idx] = float(color.blue()) * mKernel[idx];
        }
    }
    sortcolor(colorR, count);
    sortcolor(colorG, count);
    sortcolor(colorB, count);
    QColor result(clamp(median(colorR, count), 255.f, 0.f), clamp(median(colorG, count), 255.f, 0.f),
        clamp(median(colorB, count), 255.f, 0.f));
    // Bug fix: the three channel buffers were allocated with new[] but never
    // released, leaking memory on every pixel of every processed image.
    delete[] colorR;
    delete[] colorG;
    delete[] colorB;
    return result;
};
// White top-hat transform: subtracts the eroded image from the original,
// per channel, which highlights small bright details removed by erosion.
QImage TopHatFilter::process(const QImage& img) const
{
    ErosionFilter erosion(3);
    QImage result(img);
    QImage temp;
    temp = erosion.process(img);
    for (int x = 0; x < img.width(); x++)
    {
        for (int y = 0; y < img.height(); y++)
        {
            QColor color;
            QColor color_a = img.pixelColor(x, y);    // source pixel
            QColor color_ero = temp.pixelColor(x, y); // pixel after erosion
            // Per-channel difference between source and eroded image.
            int colorR = color_a.red() - color_ero.red();
            int colorG = color_a.green() - color_ero.green();
            int colorB = color_a.blue() - color_ero.blue();
            // Bug fix: the previous code called setRgb(red, BLUE, GREEN),
            // which swapped the green and blue channels of the output.
            color.setRgb(clamp<float>(colorR, 255.f, 0.f), clamp<float>(colorG, 255.f, 0.f), clamp<float>(colorB, 255.f, 0.f));
            result.setPixelColor(x, y, color);
        }
    }
    return result;
}
|
ec44277b9e2cc23c2a7871493357f267f42ca58f
|
[
"C++"
] | 4 |
C++
|
Gekata-2/Computer-Graphics
|
9c3f7a5913e637d7826b619af7a41df6f4b08eb0
|
29cc3202caea08117985565655b2088452a51c01
|
refs/heads/master
|
<file_sep>class Address < ApplicationRecord
validates :city, presence: true, format: { with: /\A[a-zA-Z]+\z/ }
validates :state, presence: true, format: { with: /\A[a-zA-Z]+\z/ }
validates :pincode, presence: true, format: { with: /\A[0-9]+\z/ }, length: { is: 6 }
end
<file_sep>class User < ApplicationRecord
validates :full_name, presence: true, format:{with: /\A[a-zA-z]+ [a-zA-z]+\z/, message: 'Invalid Name' }
validates :email, presence: true, uniqueness: true, format: {with:/\A[A-Za-z0-9\W]+*@[A-Za-z0-9\W]+[.][a-z]+\z/, message: 'Invalid Email' }
validates :password, presence: true, format: {with: /(?=.*[a-z]+)(?=.*[A-Z]+)(?=.*[0-9]+)(?=.*\W+)/, message: 'Invalid Password' }
validates :gender, presence: true, format: {with: /\AMale\z|\AFemale\z/, message: 'gender can be Male or Female ' }
validates :dob, presence: true
end
<file_sep>class Post < ApplicationRecord
validates :title, presence: true, length: { minimum: 2, maximum: 60 }
validates :content, presence: true
end
|
e234774628974ce878e76d4675c49903248c364b
|
[
"Ruby"
] | 3 |
Ruby
|
Priyanka4328/Railsapp
|
37a37047c834a9cbdb9439300a325f76029cdb09
|
9893df71881570b97cc6f9ad2fce019deb321540
|
refs/heads/master
|
<repo_name>xoBlair/growler<file_sep>/app.rb
require 'sinatra'
require 'sinatra/reloader'
# Cookie-based sessions, used by the /foo and /bar demo routes below.
enable :sessions
# before do
# end
# In-memory list of posted "growls". A class variable is shared by every
# request handler and is reset whenever the process restarts.
@@growl_array = []
# Landing page: renders the profile view (which reads @@growl_array).
get '/' do
  @@test_var = "test string "
  erb :profile
end
# Form submission: appends the posted text to the growl list and re-renders.
post '/' do
  # greeting = params[:greeting] || "Hi There"
  # name = params[:name] || "Nobody"
  @post = params[:post1]
  @@growl_array.push(@post)
  erb :profile
end
# Clears all growls and returns to the landing page.
post '/reset' do
  @@growl_array=[]
  redirect '/'
end
# Session demo: stores a greeting in the session and returns a plain string.
get '/foo' do
  session[:blair] = 'Hello blair'
  person = "blair"
  "other message"
  # redirect to('/bar')
end
# Session demo counterpart: echoes the value stored by /foo.
get '/bar' do
  session[:blair] # => 'Hello World!'
end
|
5966899566176189a15b0c93e7eb944d1a0f4182
|
[
"Ruby"
] | 1 |
Ruby
|
xoBlair/growler
|
2849fabb785b8e3dcf995bcf2dbd546d0215c78f
|
36e663abf80cb45b5fb0d2b5c2285e427c9d6ea6
|
refs/heads/master
|
<file_sep><?php
/// RENAME TO config.php
/// Where are the pictures located (relative to this script)?
define('PHOTOS_PATH', 'photos/');
/// Name of the per-directory folder where generated thumbnails are cached.
define('THUMB_PATH', '.thumb');
/// How many random photos are stacked on each folder's "picture pile".
define('PHOTO_PILE_COUNT', 5);
/// Maximum thumbnail dimension (width or height) in pixels.
define('PHOTO_SIZE', 175);
<file_sep><?php
require 'functions.php';
write_header();
/// Sanitise the requested directory: reject missing/empty values, anything
/// starting with a dot (hidden dirs and ".." traversal) and paths that are
/// not real directories under PHOTOS_PATH; fall back to the gallery root.
if (!isset($_REQUEST['dir']) || $_REQUEST['dir'] == "" || substr($_REQUEST['dir'], 0, 1) == "." || !is_dir(__DIR__ . '/' . PHOTOS_PATH . $_REQUEST['dir'])) {
    $_REQUEST['dir'] = "";
}
echo '<div class=gallery>';
/// Inside a sub-directory, show a "go back" pile linking to the parent.
if ($_REQUEST['dir'] !== "") {
    show_back(PHOTOS_PATH . $_REQUEST['dir']);
}
/// One picture pile per sub-directory, then the photos of this directory.
list_dirs(get_dirs($_REQUEST['dir']));
list_photos(get_images(PHOTOS_PATH . $_REQUEST['dir']));
echo '</div>';
<file_sep>window.onload = load_page;
recent_hash = "";
// Entry point (wired to window.onload above): starts the hash-polling loop.
function load_page()
{
    check_hash();
}
// Polls the URL hash every 40 ms and, whenever it changes, executes the
// hash content (minus the leading '#') as JavaScript -- the photo links
// set hashes like "#show('photos/x.jpg')".
// NOTE(review): eval() on location.hash runs arbitrary script taken from
// the URL. Acceptable for this self-contained gallery, but do not reuse
// this pattern on a page that handles sensitive data.
function check_hash()
{
    if (recent_hash != window.location.hash) {
        recent_hash = window.location.hash;
        if (recent_hash.length > 1) {
            eval(recent_hash.substr(1));
        }
    }
    setTimeout(check_hash, 40);
}
// Fades `obj` by repeatedly adding `step` (a percentage, may be negative)
// to `opacity` every `speed` ms. When `test(opacity)` reports that the
// target has been reached, `run_at_end` is invoked once and the loop stops.
function fade_obj(obj, opacity, step, speed, run_at_end, test)
{
    var next = opacity + step;
    obj.style.opacity = next / 100;
    if (test(next)) {
        run_at_end();
        return;
    }
    setTimeout(function ()
    {
        fade_obj(obj, next, step, speed, run_at_end, test);
    }, speed);
}
// Displays the full-size photo at `img_src` as a centred overlay with a
// random tilt, fading it in; clicking the photo fades it out, clears the
// hash and removes the element from the DOM.
function show(img_src)
{
    new_img = document.createElement("img");
    // Hidden until onload has measured and positioned it.
    new_img.style.visibility = "hidden";
    new_img.src = img_src;
    new_img.className = "photo";
    new_img.title = "Click to close; right click to save";
    new_img.onclick = function()
    {
        // Fade out from 100% to 0%, then clean up hash and DOM node.
        set_up_fade(new_img, 100, 0, -15, 30, function ()
        {
            window.location.hash = "//";
            document.body.removeChild(new_img);
        });
    };
    new_img.onload = function()
    {
        // Centre horizontally once the natural width is known, then fade in.
        this.style.left = ((get_innerWidth(window, document) / 2) - (parseInt(this.width) / 2)) + "px";
        this.style.opacity = 0;
        set_up_fade(new_img, 0, 100, 15, 30, function () {});
        this.style.visibility = "visible";
    };
    // Random tilt between -5 and +6 degrees for the polaroid look.
    new_img.style.MozTransform = "rotate(" + (Math.random() * 11 - 5) + "deg)";
    new_img.style.WebkitTransform = new_img.style.MozTransform;
    document.body.appendChild(new_img);
}
// Viewport width helper: prefers the standard window.innerWidth and falls
// back to document.body.clientWidth (old IE).
function get_innerWidth(win, doc) {
    return (typeof win.innerWidth != "undefined") ? win.innerWidth : doc.body.clientWidth;
}
// Schedules a fade of `obj` from `start_op` to `stop_op` (percentages)
// after an initial delay of `speed` ms. The sign of `step` decides whether
// the fade completes when the opacity drops to or below `stop_op`
// (negative step) or rises to or above it (positive step).
function set_up_fade(obj, start_op, stop_op, step, speed, run_at_end)
{
    var reached = (step < 0)
        ? function (cur_opacity) { return cur_opacity <= stop_op; }
        : function (cur_opacity) { return cur_opacity >= stop_op; };
    setTimeout(function ()
    {
        fade_obj(obj, start_op, step, speed, run_at_end, reached);
    }, speed);
}
}<file_sep><?php
if (!defined("__DIR__")) {
define("__DIR__", getcwd());
}
require_once "config.php";
/// Global variables
$thumb_start = '';
$thumb_top1 = '<div class=photo_div><a href="#show(\'';
$thumb_top2 = '\');"><span> </span><img class=background src=".images/polaroid.png"><img class=thumb src="';
$thumb_middle = '"><em>';
$thumb_bottom = "</em></a></div>\n";
$thumb_end = "";
///TEMP GLOBAL VAR
$_REQUEST['arrange'] = 'date';
function write_header()
{
?>
<!DOCTYPE html>
<html><head>
<title>Photos</title>
<style type="text/css">
html {
font-family: PisanNormal, sans;
}
img {
border: none;
}
.photo {
max-height: 90%;
max-width: 90%;
position: fixed;
z-index: 2147483647;
border-radius: 20px;
-webkit-border-radius: 20px;
-moz-box-shadow: 0px 0px 5px #000;
box-shadow: 0px 0px 5px #000;
-webkit-box-shadow: 0px 0px 5px #000;
/* For FF 3.5 to do curves better on an image. */
clip-path: url(.images/resources.xml#c1);
cursor: pointer;
cursor: hand;
}
a {
text-decoration: none;
color: #444;
}
/* Start of Picture Pile Style */
.pic_pile {
position: absolute;
border: #FFF 2px solid;
-moz-box-shadow: 0px 0px 5px #333;
box-shadow: 0px 0px 5px #333;
-webkit-box-shadow: 0px 0px 5px #333;
background: #FFF;
}
.folder {
position: absolute;
padding-top: 45px;
}
.front {
z-index: 99;
}
.dir {
float: left;
width: 200px!important;
height: 250px;
}
.label {
position: relative;
padding-top: 105px;
width: 80%;
z-index:999;
text-align: center;
color: #444;
-moz-transform: rotate(-6deg);
-webkit-transform: rotate(-6deg);
font-size: 25px;
}
/* End of Picture Pile Styles */
/* Start of Pictures Styles */
body {
margin: 20px auto;
padding: 0;
background: url(.images/cork-bg.png);
}
.gallery {
margin: 0 4px 0 4px;
}
.gallery .photo_div {
height: 250px;
width: 200px;
float: left;
text-align: center;
}
.gallery .photo_div a {
padding-bottom: 8px;
/*background: url(.images/polaroid.png) no-repeat;
background-size: 100%;*/
float: left;
position: relative;
text-align: center;
margin: 10px;
/*background: url(.images/loader.gif) center no-repeat;*/
}
.gallery .background {
position: absolute;
z-index: -1;
width: 100%;
height: 100%;
}
.gallery .thumb {
padding: 7px;
/*opacity:.7;*/
}
.gallery span {
background: url(.images/tape.png) no-repeat center;
display: block;
position: absolute;
top: -5px;
width:100%;
}
.gallery em {
display: block;
text-align: center;
font-size: 20px;
color: #444;
font-style: normal;
}
@font-face {
font-family: 'PisanNormal';
src: url('.fonts/PISAN.eot');
src: local('Pisan'), url('.fonts/PISAN.TTF.woff') format('woff'), url('.fonts/PISAN.ttf') format('truetype'), url('.fonts/PISAN.svg#webfont') format('svg');
font-weight: normal;
font-style: normal;
}
/* End of picture stlyes */
</style>
<script src=main.js></script>
</head><body>
<?php
}
// Prepares a string for interpolation inside single quotes in a POSIX shell
// command. The caller runs the value through addslashes() first, so a
// literal single quote arrives here as \' and is rewritten to the shell
// idiom '\''. On Windows the string is returned untouched.
///FIXME: How to do this on Windows?
function escape_single_quotes($str)
{
    if (substr(PHP_OS, 0, 3) !== 'WIN') {
        $str = str_replace("\\'", "'\\''", $str);
    }
    return $str;
}
// Turns a file or directory name into a human-friendly label: underscores
// become spaces, names that look like dates (YYYYMM or YYYYMMDD) are
// rewritten as MM/DD/YYYY, and the result is title-cased, HTML-escaped and
// word-wrapped to 22 characters per line with <br> separators.
function beautify_name($name)
{
    $label = str_replace("_", " ", $name);
    // Reorder date-like names and insert slashes.
    $dated = preg_replace('/^([1-2]\d\d\d)([01]\d)([0-3]\d)?$/', '$2/$3/$1', $label);
    if ($dated !== $label) {
        // A missing day part leaves two consecutive slashes; drop one.
        $dated = str_replace("//", "/", $dated);
    }
    return wordwrap(htmlentities(title_case($dated)), 22, "<br>\n", true);
}
// Renders a "go back" picture pile linking to the parent of $starting_dir.
// The label embeds HTML, so beautification of the full label is disabled.
function show_back($starting_dir)
{
    $parent = dirname($starting_dir);
    create_picture_pile($parent, '<- Go back <br><small>(' . beautify_name(basename($parent)) . ')</small>', false);
}
/// Returns the sub-directories of $dir_path (a path relative to
/// PHOTOS_PATH), each entry made relative to this script's directory.
function get_dirs($dir_path)
{
    ///DATE CODE
    /// NOTE(review): the 'date' arrangement mode is unfinished -- with
    /// $debug enabled it just dumps the path and terminates the script.
    $debug = false;
    if ($_REQUEST['arrange'] == 'date' && $debug) {
        die($dir_path);
    } else {
        $dirs = glob(__DIR__ . '/' . PHOTOS_PATH . $dir_path . '*', GLOB_ONLYDIR);
        /// Strip the absolute prefix so the paths are web-relative.
        foreach ($dirs as &$value) {
            $value = substr($value, strlen(__DIR__ . '/'));
        }
        return $dirs;
    }
}
// Draws one picture pile per directory; does nothing for an empty list.
function list_dirs($dirs)
{
    if (count((array)$dirs) == 0) {
        return null;
    }
    foreach ($dirs as $one_dir) {
        create_picture_pile($one_dir);
    }
}
/// Emits the HTML for one folder "picture pile": a folder icon stacked with
/// PHOTO_PILE_COUNT randomly chosen, randomly rotated thumbnails from $dir,
/// wrapped in a link to that directory. $dir_name overrides the label text;
/// $beautify controls whether the label is run through beautify_name().
function create_picture_pile($dir, $dir_name = "", $beautify = true)
{
    if ($dir_name == "") $dir_name = basename($dir);
    $dir_images = get_images($dir . '/');
    $dir_path = substr($dir, strlen(PHOTOS_PATH)) . '/';
    /// The gallery root links back to the bare index URL.
    if ($dir_path == "/" || $dir_path == "") {
        $url = '';
    } else {
        $url = 'dir=' . urlencode($dir_path);
    }
    echo '<a href="?' . $url . '">';
    echo '<div class=dir>';
    echo '<img src=".images/folder-yellow-back.png" class="folder back">';
    echo '<img src=".images/folder-yellow-front.png" class="folder front">';
    for ($i = 0; $i < PHOTO_PILE_COUNT; ++$i) {
        /// Pick a random image; the same image may appear more than once.
        /// NOTE(review): array_rand() warns on an empty directory -- the
        /// isset() below hides the missing key but not the warning.
        $rand_key = array_rand($dir_images);
        //echo find_thumb($dir_images[$rand_key]);
        //echo '<img src="' . find_thumb($dir_images[$rand_key]) . '" class="pic_pile" style="-moz-transform: rotate(' . round(mt_rand(-6, 6)) . 'deg); left: ' . (($i) * PHOTO_SIZE * -1) . 'px">';
        if (isset($dir_images[$rand_key])) {
            /// Small random tilt (-6..6 degrees) for the "pile" look.
            $rotate = round(mt_rand(-6, 6));
            echo '<img src="' . htmlentities(find_thumb($dir_images[$rand_key])) . '" class="pic_pile" style="-moz-transform: rotate(' . $rotate . 'deg);-webkit-transform: rotate(' . $rotate . 'deg);">';
        }
    }
    echo '<div class=label>' . ($beautify ? beautify_name($dir_name) : $dir_name). '</div>';
    echo "</div>";
    echo "</a>\n";
}
/// Returns the image files directly inside $dir, web-relative. The brace
/// pattern loosely matches three-letter extensions built from the letters of
/// jpg/png/gif in any case (so "jpeg" is NOT matched).
function get_images($dir)
{
    $imgs = glob(__DIR__ . '/' . $dir . '*.{j,p,g,J,P,G}{p,n,i,P,N,I}{g,f,G,F}', GLOB_BRACE);
    /// Strip the absolute prefix so the paths are web-relative.
    foreach ($imgs as &$value) {
        $value = substr($value, strlen(__DIR__ . '/'));
    }
    return $imgs;
}
/// Emits one thumbnail card (polaroid frame + caption) per photo path in
/// $files, using the template fragments defined at the top of this file.
function list_photos($files)
{
    global $thumb_start, $thumb_top1, $thumb_top2, $thumb_middle, $thumb_bottom, $thumb_end;
    echo $thumb_start;
    foreach ($files as $file) {
        /// Ensure a cached thumbnail exists and get its relative path.
        $thumb = find_thumb($file);
        /// The full-size path is embedded in a show('...') hash link.
        $thumb_top = $thumb_top1 . addslashes(htmlentities($file)) . $thumb_top2;
        echo $thumb_top . htmlentities($thumb) . $thumb_middle . wordwrap(htmlentities(title_case(str_replace("_", " ", pathinfo_filename($file)))), 22, "<br>\n", true) . $thumb_bottom;
    }
    echo $thumb_end;
}
/// Returns the web-relative path of the cached thumbnail for $file,
/// creating the per-directory THUMB_PATH folder and the thumbnail itself
/// on first use.
function find_thumb($file)
{
    $path = pathinfo(__DIR__ . '/' . $file, PATHINFO_DIRNAME) . '/' . THUMB_PATH . '/';
    $filename = pathinfo($file, PATHINFO_BASENAME);
    if (!is_dir($path)) {
        mkdir($path);
    }
    if (!file_exists($path . $filename)) {
        create_thumb($file, $path . $filename);
    }
    return substr($path, strlen(__DIR__ . '/')) . $filename;
}
/// Generates a thumbnail of $original at $new_filename, at most
/// PHOTO_SIZE pixels on the longer side. Tries ImageMagick's `convert`
/// first (paths are quoted via addslashes + escape_single_quotes before
/// being passed to the shell) and falls back to GD for gif/jpeg/png.
function create_thumb($original, $new_filename)
{
    /// Attempt to create the thumbnail with ImageMagick.
    ///NOTE: It would be good to add a constant for the "convert" executable.
    $res = shell_exec("convert '" . escape_single_quotes(addslashes(__DIR__ . '/' . $original)) . "' -thumbnail " . PHOTO_SIZE . "x" . PHOTO_SIZE . " '" . escape_single_quotes(addslashes($new_filename)) . "' 2>&1");
    /// Did ImageMagick work?
    if (!file_exists($new_filename)) {
        /// Attempt to create the thumbnail with GD.
        list($orig_width, $orig_height, $imagetype) = getimagesize(__DIR__ . '/' . $original);
        $ratio_orig = $orig_width / $orig_height;
        /// Is the image is wider than it is tall?
        if ($orig_width > $orig_height) {
            $maxwidth = PHOTO_SIZE;
            $maxheight = round(PHOTO_SIZE / $ratio_orig);
        } else {
            $maxheight = PHOTO_SIZE;
            $maxwidth = round(PHOTO_SIZE * $ratio_orig);
        }
        /// Create blank image for the thumbnail.
        $thumb = imagecreatetruecolor($maxwidth, $maxheight);
        $type_unknown = false;
        /// getimagesize() type codes: 1 = GIF, 2 = JPEG, 3 = PNG.
        if ($imagetype == 1) {
            $source = imagecreatefromgif(__DIR__ . '/' . $original);
            imagecopyresampled($thumb, $source, 0, 0, 0, 0, $maxwidth, $maxheight, $orig_width, $orig_height);
        } elseif ($imagetype == 2) {
            $source = imagecreatefromjpeg(__DIR__ . '/' . $original);
            imagecopyresampled($thumb, $source, 0, 0, 0, 0, $maxwidth, $maxheight, $orig_width, $orig_height);
        } elseif ($imagetype == 3) {
            $source = imagecreatefrompng(__DIR__ . '/' . $original);
            imagecopyresampled($thumb, $source, 0, 0, 0, 0, $maxwidth, $maxheight, $orig_width, $orig_height);
        } else {
            $type_unknown = true;
        }
        /// All thumbnails are written as JPEG at quality 85.
        if (!$type_unknown) {
            imagejpeg($thumb, $new_filename, 85);
        }
    }
}
// Title-cases $title: each word is capitalised except short prepositions,
// articles and conjunctions, which stay lower-case unless they are the
// first or the last word.
// Bug fix: the last-word check compared $key against count($words) instead
// of count($words) - 1, so the last word was never force-capitalised.
// Note: the stop-word lookup is case-sensitive, as before.
function title_case($title)
{
    $preps_articles_conjunctions = array('of', 'a', 'the', 'and', 'an', 'or', 'nor', 'but', 'is', 'if', 'then', 'else', 'when', 'at', 'from', 'by', 'on', 'off', 'for', 'in', 'out', 'over', 'to', 'into', 'with');
    $words = explode(' ', $title);
    $last_key = count($words) - 1;
    foreach ($words as $key => $word) {
        if ($key === 0 || $key === $last_key || !in_array($word, $preps_articles_conjunctions)) {
            $words[$key] = ucwords(strtolower($word));
        }
    }
    $newtitle = implode(' ', $words);
    return $newtitle;
}
// Returns the file name component of $path without its extension.
// Falls back to manual trimming on PHP < 5.2.0, where the
// PATHINFO_FILENAME constant does not exist yet.
function pathinfo_filename($path)
{
    if (defined('PATHINFO_FILENAME')) {
        return pathinfo($path, PATHINFO_FILENAME);
    }
    $parts = pathinfo($path);
    $base = $parts['basename'];
    return substr($base, 0, strlen($base) - strlen($parts['extension']) - 1);
}
/// Returns the best-guess capture date of an image as an array with keys
/// 'timestamp', 'year', 'month' and 'month_str', or false if the file does
/// not exist. Prefers the EXIF DateTimeOriginal tag, then
/// DateTimeDigitized, and finally the file's modification time.
function get_file_date($filename)
{
    if (!file_exists($filename)) {
        return false;
    }
    $exif = exif_read_data($filename, 0, true);
    if (isset($exif['EXIF']) && isset($exif['EXIF']['DateTimeOriginal']) && $exif['EXIF']['DateTimeOriginal'] != "") {
        $filetime = strtotime($exif['EXIF']['DateTimeOriginal']);
    } elseif (isset($exif['EXIF']) && isset($exif['EXIF']['DateTimeDigitized']) && $exif['EXIF']['DateTimeDigitized'] != "") {
        $filetime = strtotime($exif['EXIF']['DateTimeDigitized']);
    } else {
        ///TODO: Try PEL if exif_read_data() fails.
        $filetime = filemtime($filename);
    }
    ///NOTE: month_str is the month as a string, i.e., "January" instead of "01".
    return Array('timestamp' => $filetime, 'year' => date('Y', $filetime), 'month' => date('m', $filetime), 'month_str' => date('F', $filetime));
}
|
98b480f887616383d1ade16bcfc7c512e04fd8b0
|
[
"JavaScript",
"PHP"
] | 4 |
PHP
|
nmrugg/Photos-Now
|
1ca2392c5ef7e4f73f9d05db84bdd66b0739d8af
|
baf0f1b48241f06f4852d0deb1ce9540ad234cb0
|
refs/heads/master
|
<repo_name>jiahao-shen/SDN-TE-SR-tools<file_sep>/parsers-generators/mod_string.py
import os
def modifica(b):
    # Bumps the "perc = <b>" assignment inside flow_allocator_mod.py to
    # b + 0.1 by running sed in-place; the second os.system call below
    # (outside this edit) then re-runs the allocator.
    # NOTE(review): str(b) / str(b + 0.1) are interpolated straight into a
    # shell command, so b must be a trusted number; float repr artefacts
    # (e.g. 0.30000000000000004) can make the sed pattern miss -- confirm
    # callers only pass values whose decimal repr round-trips cleanly.
    os.system("sed -i 's/perc = "+str(b)+"/perc = "+str(b+0.1)+"/g' flow_allocator_mod.py")
os.system("python flow_allocator_mod.py --controller 127.0.0.1:8080 --f graphml/Colt.graphml")<file_sep>/parsers-generators/test_load_network_utility.py
import networkx as nx
import random
CONFIDENCE=5
# L average length in bit
L = 1000
# Scale factor (Capacity and size are expressed in kb/s)
S = 1000
def Catalog_Generator(flussi_aggregati, epsilon, list_node, seed): # Returns a dict of flows between pairs of edge nodes
    # Variable-epsilon model:
    #   size = epsilon * S * N / T, where
    #   S = capacity of the aggregate flow between the two nodes (i[2])
    #   N = number of node pairs with non-zero demand (len(flussi_aggregati))
    #   T = sum of all S values (total aggregate demand)
    # Each entry of `flussi_aggregati` is (src, dst, aggregate_size); each
    # entry of `list_node` is a mutable [src, dst, ..., blocked_flag] record,
    # and pairs whose flag (node[3]) is True are skipped.
    flow_catalogue_test = {}
    count = 0
    # Seeded shuffle so the catalogue order is reproducible for a given seed.
    random.seed(seed)
    random.shuffle(list_node)
    T=0
    for i in flussi_aggregati:
        T=T+i[2]
    for node in list_node:
        for i in flussi_aggregati:
            if i[0]==node[0] and i[1]==node[1] and node[3]==False:
                flow_catalogue_test[count]=(node[0],node[1],{'out':{'size': epsilon*(i[2])*len(flussi_aggregati)/float(T), 'allocated': False, 'srcPort': '', 'dstPort':'', "path": [], "type": "vll"}}) # variable epsilon
                #flow_catalogue_test[count]=(node[0],node[1],{'out':{'size': epsilon, 'allocated': False, 'srcPort': '', 'dstPort':'', "path": [], "type": "vll"}}) # constant epsilon
                count = count + 1
    return flow_catalogue_test
def multidigraph_from_flow_catalogue (fc_dict):
    """Build a networkx MultiDiGraph of demands from a flow catalogue.

    Each catalogue entry (src, dst, flow_dict) contributes a src->dst edge
    for its 'out' direction and a dst->src edge for its 'in' direction,
    keyed by the flow id and annotated with the demand size and an empty
    path placeholder.
    """
    demands = nx.MultiDiGraph()
    for flow_id, (src, dst, flow_dict) in fc_dict.iteritems():
        out_spec = flow_dict.get('out', {})
        if 'size' in out_spec:
            demands.add_edge(src, dst, flow_id, {'size': out_spec['size'], 'path': []})
        in_spec = flow_dict.get('in', {})
        if 'size' in in_spec:
            demands.add_edge(dst, src, flow_id, {'size': in_spec['size'], 'path': []})
    return demands
def cspf(nx_topology, flow_catalogue, nx_flows, control, BIGK, list_node, Risultato_test):
    # Constrained Shortest Path First allocation over a MultiDiGraph.
    # For every catalogued flow whose (src, dst) pair is present and not
    # blocked in `list_node` (i[3] == False): prune edges that cannot fit the
    # demand, run Dijkstra on 'weight', allocate the flow along the found
    # path and refresh edge weights. If the 'in' direction fails, the 'out'
    # allocation for the same flow is rolled back.
    # Returns `control`, which becomes True once at least one 'out' path is
    # allocated. `Risultato_test` is unused here.
    # NOTE(review): the 'out' branch prunes/searches `nx_topology` itself
    # although a working copy is created -- the pruning is therefore
    # cumulative across flows; confirm this is intentional.
    for flow_id, (src, dst, flow_dict) in flow_catalogue.iteritems():
        for i in list_node:
            if i[0]==src and i[1]==dst and i[3]==False:
                if 'out' in flow_dict and 'size' in flow_dict['out']:
                    work_nx_multidigraph = nx_topology.copy() # Create a working copy
                    size = flow_dict['out']['size']
                    # Pruning is applied to nx_topology (not the copy), so at
                    # the end only links never removed by prune survive.
                    prune_graph_by_available_capacity(nx_topology, size, list_node)
                    try:
                        path = nx.dijkstra_path(nx_topology, src, dst,'weight')
                        i[2]=i[2]+size  # accumulate allocated demand for this pair
                        allocate_flow (nx_topology, path, size, "%s-out" % flow_id)
                        #store_path (nx_flows, src, dst, flow_id, path)
                        set_allocated (flow_catalogue, flow_id, "out", allocated = True)
                        set_weights_on_available_capa(BIGK, nx_topology)
                        control = True
                    except nx.NetworkXNoPath:
                        path = []
                        i[3]=True  # mark the pair as unroutable
                        #print "NO PATH AVAILABLE"
                        continue
                if 'in' in flow_dict and 'size' in flow_dict['in']:
                    work_nx_multidigraph = nx_topology.copy() # Create a working copy
                    size = flow_dict['in']['size']
                    # NOTE(review): this call omits the `list_node` argument
                    # required by prune_graph_by_available_capacity's
                    # signature and would raise a TypeError if reached.
                    prune_graph_by_available_capacity(work_nx_multidigraph, size)
                    try:
                        path = nx.dijkstra_path(work_nx_multidigraph, dst, src,'weight')
                        allocate_flow (nx_topology, path, size, "%s-in" % flow_id)
                        store_path (nx_flows, dst, src, flow_id, path)
                        set_allocated (flow_catalogue, flow_id, "in", allocated = True)
                        set_weights_on_available_capa(BIGK, nx_topology)
                    except nx.NetworkXNoPath:
                        # Reverse path failed: roll back the forward ('out')
                        # allocation of the same flow.
                        path = nx_flows[src][dst][flow_id]['path']
                        size = flow_dict['out']['size']
                        de_allocate_flow (nx_topology, path, size, "%s-out" % flow_id)
                        delete_path (nx_flows, src, dst, flow_id)
                        set_allocated (flow_catalogue, flow_id, "out", allocated = False)
                        set_weights_on_available_capa(BIGK, nx_topology)
    return control
def allocate_flow (nx_multidigraph, path, size, flow_id):
    """Reserve `size` units of capacity along `path`, in place.

    On every hop the parallel edge with the largest available capacity
    (capacity - allocated) is chosen; if no edge has positive availability
    the fallback is edge key 0. `flow_id` is currently unused (the 'flows'
    bookkeeping below is commented out).
    """
    for hop in range(len(path) - 1):
        parallel_edges = nx_multidigraph[path[hop]][path[hop + 1]]
        best_key = 0
        best_av_cap = 0.0
        for key in parallel_edges:
            attrs = parallel_edges[key]
            av_cap = attrs['capacity'] - attrs['allocated']
            if av_cap > best_av_cap:
                best_av_cap = av_cap
                best_key = key
        chosen = parallel_edges[best_key]
        chosen['allocated'] = chosen['allocated'] + size
        #chosen['flows'].append(flow_id)
    return
# Releases a flow's reservation along the path it was allocated on.
def de_allocate_flow (nx_multidigraph, path, size, flow_id):
    """Give back `size` units previously reserved for `flow_id` on `path`.

    On each hop, the first parallel edge whose 'flows' list contains the
    flow id returns the capacity and forgets the flow. In-place update.
    """
    for hop in range(len(path) - 1):
        parallel_edges = nx_multidigraph[path[hop]][path[hop + 1]]
        for key in parallel_edges:
            attrs = parallel_edges[key]
            if flow_id in attrs['flows']:
                attrs['allocated'] = attrs['allocated'] - size
                attrs['flows'].remove(flow_id)
                break
    return
# Records the computed path on the demand edge (src, dst, flow_id).
def store_path (nx_multidigraph, src, dst, flow_id, path):
    nx_multidigraph[src][dst][flow_id]['path'] = path
    return
# Clears the recorded path on the demand edge (src, dst, flow_id).
def delete_path (nx_multidigraph, src, dst, flow_id):
    nx_multidigraph[src][dst][flow_id]['path'] = []
    return
# Flags one direction of a catalogued flow as allocated / de-allocated.
def set_allocated (flow_catalogue, flow_id, direction, allocated = True):
    """Set the 'allocated' flag of the 'out' or 'in' half of a flow.

    Any other `direction` value is silently ignored, matching the original
    if/if behaviour.
    """
    if direction in ('out', 'in'):
        flow_catalogue[flow_id][2][direction]['allocated'] = allocated
def prune_graph_by_available_capacity(nx_multidigraph, size, list_node, tolerance = False,):
    # Removes from the graph every edge whose available capacity, minus a
    # CONFIDENCE-percent safety margin of its nominal capacity, cannot fit a
    # demand of `size`. Edges without a 'capacity' attribute are kept.
    # NOTE(review): `list_node` is never used here, yet one caller in cspf()
    # omits it entirely (TypeError if reached) -- confirm intended signature.
    # NOTE(review): remove_edge(u, v) on a MultiDiGraph deletes an arbitrary
    # parallel edge between u and v, not necessarily the one just examined.
    for edge in nx_multidigraph.edges_iter(data = True):
        if 'capacity' in edge[2]:
            # Safety margin: CONFIDENCE percent of the nominal capacity.
            epsilon = (float(edge[2]['capacity']) * CONFIDENCE)/100
            if (edge[2]['capacity'] - edge[2].get('allocated',0) - epsilon >= size):
                continue
            else:
                # With tolerance enabled, over-full edges are kept anyway.
                if tolerance:
                    continue
                nx_multidigraph.remove_edge(edge[0],edge[1])
def set_weights_on_available_capa(BIGK, nx_multidigraph):
    """Recompute every edge weight as BIGK / available capacity, in place.

    Edges without 'capacity' default to BIGK and missing 'allocated' to 0,
    so untouched default edges get weight ~1 while nearly saturated edges
    become very expensive for Dijkstra.
    """
    for edge in nx_multidigraph.edges_iter(data = True):
        attrs = edge[2]
        available = attrs.get('capacity', BIGK) - attrs.get('allocated', 0)
        attrs['weight'] = float(BIGK) / available
    return
<file_sep>/README.md
# SDN-TE-SR-tools
Tools for SDN based Traffic Engineering and Segment Routing
It includes
* parsers-generators
* java-te-sr
* OSHI-SR-pusher
## parsers-generators
It is a collection of tools for:
1. parsing topologies and converting them among different formats
2. generating traffic demands and evaluate some metrics
We consider two examples:
## Examples
### Small scale topology (Segment Routing paths deployment in the emulator)
In this example we parse a topology generated with Topology3D GUI, extract the set of flows (Virtual Leased Lines), allocate a Segment Routing path for each flow and then deploy the SR path on the Mininet emulator that emulates the topology.
* Set ENABLE_SEGMENT_ROUTING = True in nodes.py of the project Dreamer-Mininet-Extensions (Line 32)
* In the Topology3D GUI load the example topology: from the top bar "Topology" menu, select "Import topology from file"
Choose the file /home/user/workspace/sdn-te-sr-tools/parsers-generators/t3d/small-topo2-4-vll.t3d
* Deploy the topology: In the left frame, from the Deployment menu, select Deploy.
In the deployment window on the bottom, type deploy and then press enter.
* Identify the controller IP address from the output of the deployment script and run the controller from a console in your VM (root password is "root"):
```
$ ssh -X [email protected]
# ./ryu_start.sh
```
(or manually:)
```
$ ssh -X [email protected]
# cd /home/user/workspace/dreamer-ryu/ryu/app
# ryu-manager rest_topology.py ofctl_rest.py --observe-links
```
* Generates the flow catalogue to be handed over to the SR allocation algorithm (properly replace the controller IP address), from a second console in your VM, then move the files in topology and flows folders of java-te-sr project
```
$ cd /home/user/workspace/Mantoo-scripts-and-readme
$ ./generate_topo_and_flow_cata.sh 10.255.245.1:8080
```
(or manually:)
```
$ cd /home/user/workspace/sdn-te-sr-tools/parsers-generators
$ python parse_transform_generate.py --in ctrl_ryu --out nx --generate_flow_cata_from_vll_pusher_cfg --controller 10.255.245.1:8080
$ mv flow_catalogue.json ../java-te-sr/flow/
$ mv links.json ../java-te-sr/topology/
$ mv nodes.json ../java-te-sr/topology/
```
* Check the generated flow catalogue
```
$ cat /home/user/workspace/sdn-te-sr-tools/parsers-generators/flow_catalogue.json
```
* Run the SR allocation algorithm
* Open Eclipse (from the Applications Menu at the top left, select Development->Eclipse)
* set Main parameters (right click on UniPR-SDN-TE-SR project, Run as-> Run Configurations, in the Arguments tab edit the Program arguments as follows)
```
topo_in=topology/links.json
topo_out=topology/links.json.out
flows_in=flow/flow_catalogue.json
flows_out=flow/flow_catalogue.json.out
```
* Run (right click on UniPR-SDN-TE-SR project, Run as-> Run Configurations, click the Run button at the right bottom NB the Main class should be it.unipr.netsec.sdn.run.Main) - do not expect any message on the console, if everything is OK nothing is returned
* Move flow_catalogue.json.out to OSHI-SR-pusher and run sr_vll_pusher
```
$ cd /home/user/workspace/Mantoo-scripts-and-readme
$ ./sr_pusher_start.sh 10.255.245.1:8080 --add
```
(or manually:)
```
$ cd /home/user/workspace/sdn-te-sr-tools
$ mv java-te-sr/flow/flow_catalogue.json.out OSHI-SR-pusher/out_flow_catalogue.json
$ cd /home/user/workspace/sdn-te-sr-tools/OSHI-SR-pusher/
$ rm sr_vlls.json
$ ./sr_vll_pusher.py --controller 10.255.245.1:8080 --add
```
* Now you can ping over a VLL implemented using segment routing. For example ping from cer1 to cer9:
* open a shell on cer1 (on the web GUI with CTRL-Left-Click on cer1 or with
```
ssh -X [email protected]
```
* show the interfaces of cer1
```
ip a
```
* ping to cer9 on the VLL
```
ping 10.0.14.2
```
### Large scale topology (no actual Segment Routing paths deployment in the emulator)
In this example we parse a graphml file that represents a large scale topology (>100 nodes), select a subset of nodes as edge nodes, generate the traffic demands (a set of flows among the edge nodes). Then we use a classical Traffic Engineering approach to select TE paths for the flows and then an optimal Segment Routing allocation algorithm to allocate SR paths. We are able to evaluate the percentage of allocated and rejected flows and metrics about the Segment Routing paths. We are not deploying the SR paths, because the topology is too big to be emulated in the Mininet emulator.
* using parsers-generators we parse a graphml file that represents a large scale topology (>100 nodes), select a subset of nodes as edge nodes, generate a catalogue of flows among the edge nodes, export the topology and the catalogue in json files, called nodes.json and links.json (for the topology) and flow_catalogue.json for the traffic demands
```
$ cd /home/user/workspace/sdn-te-sr-tools/parsers-generators
$ python parse_transform_generate.py --f graphml/Colt_2010_08-153N.graphml --in graphml --out nx --select_edge_nodes --generate_demands --access_node_prob 0.4 --t_rel_prob 0.2 --mean_num_flows 4 --max_num_flows 10 --link__to_t_rel_ratio 10
```
* using java-te-sr we allocate the flows on the topology with a classical Traffic Engineering (TE) approach, then we take in input the selected TE paths and allocate the SR paths
<file_sep>/parsers-generators/demand_gen_ste.py
#!/usr/bin/python
#traffic_rel_probability e' la soglia che determina se tra due nodi c'e' un almeno un flusso
#p e' la soglia che determina se c'e' piu' di un flusso tra X E Y, segue una distribuzione geometrica
import random
import numpy as np
#n_medio_flussi_multipli = 3 # nemero medio di flussi multipli
#link_capa_to_traff_rel_ratio = 0.04 #link_capa_to_traff_rel_ratio: ratio of the demand between two access node and the average link capacity
def add_multiple_flows(flow_catalogue, s, src, dst, avg_flow_size): # model that loads the multiple flows
    # Appends `s` bidirectional VLL flows between src and dst to the
    # catalogue; each direction gets an independent size drawn from the
    # (unbounded) exponential model in capacita_flusso_mod_esp, keyed by a
    # fresh id from get_id().
    for k in range(0,s):
        #count = count +1
        #out_file.write("il flusso "+str(count)+" ["+str(src)+", "+str(dst)+"]\n")
        size_out = capacita_flusso_mod_esp(avg_flow_size)
        size_in = capacita_flusso_mod_esp(avg_flow_size)
        flow_id = get_id()
        flow_catalogue[flow_id]=(src,dst,{'id': flow_id, 'out':{'path': [], 'size': size_out, 'allocated': False, 'srcPort': '', 'dstPort':'', 'type':'vll'},'in':{'path': [], 'size': size_in, 'allocated': False, 'srcPort': '', 'dstPort':'', 'type':'vll'}})
#returns the average link capacity
def avg_link_capacity(nx_topology):
C = 0
count = 0
for edge in nx_topology.edges_iter(data=True):
C = C + edge[2]['capacity']
count = count +1
if count > 0:
C = C / float(count) #average link capacity
print "average link capacity: ", C
return C
#returns the avg_flow_demand that corresponds to the link_capa_to_traff_rel_ratio
def avg_flow_demand(avg_link_capa, link_capa_to_traff_rel_ratio, avg_num_flows):
    """Average per-flow demand such that avg_num_flows flows of this size
    sum to avg_link_capa / link_capa_to_traff_rel_ratio, i.e. the aggregate
    demand of one traffic relation."""
    aggregate_demand = avg_link_capa / link_capa_to_traff_rel_ratio
    return aggregate_demand / float(avg_num_flows)
def capacita_flusso_mod_esp(avg_flow_size):
a=random.expovariate(1/float(avg_flow_size)) # la capacita' dei flussi segue un modello esponenziale limitato tra 1 e 10
return a # non e' limitato!!
def capacita_flusso_mod_unif(avg_flow_size):
cap_min=5
cap_max=(2*avg_flow_size)-cap_min
a=random.uniform(cap_min,cap_max)
return a
# it adds a key to the node properties, with value True
# p_mark is the probability to mark a node
# returns the number of marked nodes and the total number of nodes
def add_nodes_marks(nx_topology, p_mark=1, key_to_add='mark'):
    """Mark each node with probability p_mark by setting
    node_dict[key_to_add] = True; return (marked_count, total_count)."""
    total_nodes = 0
    marked_nodes = 0
    for _, node_dict in nx_topology.nodes_iter(data=True):
        total_nodes = total_nodes + 1
        if random.random() < p_mark:
            node_dict[key_to_add] = True
            marked_nodes = marked_nodes + 1
    return marked_nodes, total_nodes
# it removes a key from the node properties
def del_nodes_marks (nx_topology, key_to_remove):
    """Drop `key_to_remove` from every node attribute dict where present."""
    for _, node_dict in nx_topology.nodes_iter(data=True):
        node_dict.pop(key_to_remove, None)
#traffic_rel_probability probability that two access nodes have a traffic relation
def build_flows(nx_topology, traffic_rel_probability=1, avg_num_flows=1, max_num_flows=10, link_capa_to_traff_rel_ratio=20):
    # Builds a flow catalogue between all nodes flagged 'is_edge' in the
    # topology. Each unordered pair of edge nodes gets a traffic relation
    # with probability traffic_rel_probability; the number of flows per
    # relation is geometric with mean avg_num_flows, capped at
    # max_num_flows, and the per-flow size is derived from the average link
    # capacity (see avg_flow_demand / add_multiple_flows).
    flow_catalogue = {}
    avg_flow_size = avg_flow_demand(avg_link_capacity(nx_topology), link_capa_to_traff_rel_ratio, avg_num_flows)
    edge_nodes = list()
    for my_node, node_dict in nx_topology.nodes_iter(data=True):
        if 'is_edge' in node_dict and node_dict['is_edge']:
            edge_nodes.append({my_node:node_dict})
    #print "number of access nodes ", len(edge_nodes)
    for src in range(len(edge_nodes)):
        #print edge_nodes[i]
        for dst in range (src+1, len(edge_nodes)):
            #print src, dst
            if random.random() < traffic_rel_probability:
                # Each edge_nodes entry is a single-key {node_id: attrs}
                # dict, so these loops just unpack the two node ids.
                for node_id_src in edge_nodes[src]:
                    for node_id_dst in edge_nodes[dst]:
                        #print edge_nodes[src][node_id]
                        s = min (np.random.geometric((1/float(avg_num_flows))),max_num_flows)
                        #print "s= ", s
                        add_multiple_flows(flow_catalogue, s, node_id_src, node_id_dst, avg_flow_size)
    return flow_catalogue
# if access_nodes<>0 is given as input, it aslo provides the ratio between actual traffic
# relations and possiible traffic relations
def get_flow_catalogue_stats(flow_catalogue, access_nodes=0):
    # Summarises a flow catalogue: number of catalogue entries, number of
    # unidirectional flows (each 'out'/'in' half counted separately), the
    # average flow size, and the set of distinct unidirectional (src, dst)
    # traffic relations. With access_nodes > 0, also reports the relations
    # as a percentage of all possible ordered pairs.
    out_dict={}
    out_dict['num_of_flows_in_catalogue']=len(flow_catalogue)
    overall_size_sum = 0
    overall_count = 0
    # Keyed by (src, dst); values are unused placeholder dicts.
    unidir_traffic_relations={}
    for my_flow_id, flow_data in flow_catalogue.iteritems():
        if 'out' in flow_data[2] and 'size' in flow_data[2]['out']:
            overall_count += 1
            overall_size_sum += flow_data[2]['out']['size']
            if not (flow_data[0], flow_data[1]) in unidir_traffic_relations:
                unidir_traffic_relations[flow_data[0], flow_data[1]]={}
        if 'in' in flow_data[2] and 'size' in flow_data[2]['in']:
            overall_count += 1
            overall_size_sum += flow_data[2]['in']['size']
            # The 'in' direction is the reverse relation (dst -> src).
            if not (flow_data[1], flow_data[0]) in unidir_traffic_relations:
                unidir_traffic_relations[flow_data[1], flow_data[0]]={}
    # if not 'num_flows' in unidir_traffic_relations[flow_data[0], flow_data[1]]:
    # 	unidir_traffic_relations[flow_data[0], flow_data[1]]['num_flows']=0
    # unidir_traffic_relations[flow_data[0], flow_data[1]]['num_flows']+=1
    out_dict['num_of_unidir_flows']=overall_count
    avg_flow_size=0
    if overall_count > 0:
        avg_flow_size=overall_size_sum/float(overall_count)
    out_dict['avg_flow_size']=avg_flow_size
    out_dict['unidir_traffic_relations_number']=len(unidir_traffic_relations)
    if access_nodes>0:
        # Percentage of all possible ordered pairs of access nodes.
        out_dict['unidir_traffic_relations_percentage']= float(len(unidir_traffic_relations))/access_nodes/(access_nodes-1)*100
    if len(unidir_traffic_relations) > 0:
        out_dict['avg_num_of_flow_per_unidir_traffic_relation']=overall_count/float(len(unidir_traffic_relations))
    #for traffic_relation in unidir_traffic_relations:
    return out_dict
def get_id():
    """Return a monotonically increasing id as a string ('0', '1', ...).

    State is kept as an attribute on the function object itself.
    #TODO it could be possible to replace with sip
    """
    counter = getattr(get_id, "counter", -1) + 1
    get_id.counter = counter
    return str(counter)
<file_sep>/parsers-generators/Ford_Fulkerson_Algorithm.py
import networkx as nx
from networkx.algorithms.flow import ford_fulkerson
import random
from math import sqrt
def Ford_Fulkerson(mod,nx_topology, nx_flows, num_random_rep, Risultati_Test):
    """Run max-flow experiments between every pair of border nodes.

    mod: "dipendente" makes the runs dependent (residual capacities carry over
         between pairs inside a repetition); any other value keeps them independent.
    nx_topology: graph whose edges carry 'capacity' and whose nodes carry
         'type_node' ('bordo' marks a border node).
    nx_flows: not used in this function -- NOTE(review): confirm it can be dropped.
    num_random_rep: number of repetitions; from the second one on, the pair
         list is shuffled with a deterministic seed.
    Risultati_Test: open writable file where the results are logged.
    Returns the per-pair statistics produced by Calculation_of_value.
    """
    G = nx.DiGraph()
    for edge in nx_topology.edges_iter(data=True):
        G.add_edge(edge[0], edge[1], capacity=edge[2]['capacity'])
    G_copy = G.copy()
    nx_topology_copy=nx_topology.copy()
    list_node=[] # all (source, sink) border-node pairs on which the max flow is computed
    list_result_maxflow = [] # max flows of all the num_random_rep source/sink orderings
    seed = 5
    # build every ordered pair of distinct border ('bordo') nodes
    for edge in nx_topology.nodes_iter(data=True):
        if edge[1]['type_node'] == 'bordo' :
            for edge1 in nx_topology_copy.nodes_iter(data=True):
                if edge1[1]['type_node'] == 'bordo' and edge[0] != edge1[0] :
                    list_node.append([edge[0],edge1[0]])
    # first repetition runs on the pairs in their natural order
    Algorithm(G_copy,list_node, list_result_maxflow, mod, Risultati_Test)
    #Risultati_Test.write(str(G_copy.edges(data=True))+"\n\n")
    G_copy = G.copy()
    # remaining repetitions: shuffle the pair order with a deterministic seed
    for i in range(0,num_random_rep-1):
        random.seed(seed+i)
        random.shuffle(list_node)
        Algorithm(G_copy, list_node, list_result_maxflow, mod, Risultati_Test)
        #Risultati_Test.write(str(G_copy.edges(data=True))+"\n\n")
        G_copy = G.copy()
    Risultati_Test.write("Elenco max flow ([source, sink, maxflow]):\n")
    for i in list_result_maxflow:
        Risultati_Test.write(str(i))
        Risultati_Test.write("\n")
    list_value = Calculation_of_value(list_result_maxflow, Risultati_Test)
    return list_value
def Algorithm(G, list_node, list_result_maxflow, mod, Risultati_Test):
    """Compute the Ford-Fulkerson max flow for every (source, sink) pair.

    ford_fulkerson returns a pair: [0] the max-flow value, [1] the per-link
    capacities used by that flow. With mod == "dipendente" the residual
    capacities are subtracted from G after each pair, so subsequent pairs
    see a reduced network. Appends [source, sink, maxflow] triples (one list
    per invocation) to list_result_maxflow.
    """
    pair_results = []
    for pair in list_node:
        source, sink = pair[0], pair[1]
        max_flow = ford_fulkerson(G, source, sink)
        if mod == "dipendente":
            # dependent mode: remove the consumed capacity before the next pair
            G = New_Topology(G, max_flow)
        # pairs with a zero max flow are kept as well
        pair_results.append([source, sink, max_flow[0]])
    list_result_maxflow.append(pair_results)
def New_Topology(G, max_flow):
    """Subtract the capacity consumed by a computed max flow from the graph.

    max_flow[1] maps source -> {dest: used_capacity}; every matching edge of
    G has its 'capacity' attribute reduced in place, making later runs
    dependent on earlier ones. Returns the (mutated) graph.
    """
    used = max_flow[1]
    for u, v, attrs in G.edges_iter(data=True):
        if u in used and v in used[u]:
            attrs['capacity'] = attrs['capacity'] - used[u][v]
    return G
def Calculation_of_value(list_result_maxflow, Risultati_Test): # computes min, max, mean and standard deviation of the max flow per source/sink pair
    """Aggregate per-pair max-flow statistics across all repetitions.

    list_result_maxflow: list of repetitions, each a list of [source, sink, maxflow].
    Risultati_Test: open writable file where the results are logged.
    Returns a list of [source, sink, min, max, mean, standard deviation].
    """
    massimo = 0
    minimo = 0
    val_medio = 0
    var_parz = 0
    list_value = [] # holds [source, sink, min, max, mean, standard deviation] of the max flow
    cont=0
    # repetitions other than the first, scanned to accumulate per-pair stats
    list_result_maxflow_copy=list_result_maxflow[:]
    del list_result_maxflow_copy[0]
    # first pass: min, max and mean per (source, sink) pair
    for i in list_result_maxflow[0]:
        val_medio = i[2]
        minimo = i[2]
        massimo = i[2]
        n=1
        for k in list_result_maxflow_copy:
            for j in k:
                if i[0]==j[0] and i[1]==j[1]:
                    n=n+1
                    val_medio = val_medio+ j[2]
                    if j[2]<minimo:
                        minimo=j[2]
                    if j[2]>massimo:
                        massimo=j[2]
        val_medio=float(val_medio)/n # mean = (1/n) * sum(x)
        list_value.append([i[0], i[1], minimo, massimo, val_medio, 0])
    # second pass: variance / standard deviation around the mean computed above
    for i in list_result_maxflow[0]:
        var_parz = (i[2]-list_value[cont][4])**2
        n=1
        for k in list_result_maxflow_copy:
            for j in k:
                if i[0]==j[0] and i[1]==j[1]:
                    n=n+1
                    var_parz = var_parz + (j[2]-list_value[cont][4])**2
        # NOTE(review): the original comment claimed the corrected sample
        # variance (1/(n-1)), but the code divides by n (population variance)
        # -- confirm which one is intended before changing it.
        var_camp_corr=float(1)/(n)*var_parz
        list_value[cont][5] = sqrt(var_camp_corr) # standard deviation
        cont = cont +1
    Risultati_Test.write("\nLista che contiene i valori calcolati [source, sink, minimo maxflow, massimo max_flow, valore medio, deviazione standard]:\n")
    for i in list_value:
        Risultati_Test.write(str(i)+"\n")
    return list_value
<file_sep>/parsers-generators/sendEmail.py
import os
# Best-effort notification: pipes a pre-written message file into sendmail.
# NOTE(review): hard-coded path /home/user/email.txt -- confirm it exists on
# the target host; os.system's exit status is ignored.
os.system("sendmail -t < /home/user/email.txt")
<file_sep>/parsers-generators/parse_transform_generate.py
#!/usr/bin/python
#######################################################################################################
# Copyright (C) 2015 <NAME> - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2015 <NAME>, <NAME>, <NAME> - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# te traffic engineering
#
# @author <NAME> <<EMAIL>>
# @author <NAME> <<EMAIL>>
# @author <NAME> <<EMAIL>>
# @author <NAME> <<EMAIL>>
#######################################################################################################
"""
Parses and transforms topologies between different formats - Generates traffic demands
"""
import os
import json
import inspect
import sys
import networkx as nx
import copy
import argparse
import random
import siphash
import time
import xml.etree.ElementTree as ET
import re
from topologybuilder import *
import numpy as np
from distribuzioni import *
from timer_shortestpath import *
from timer_cspf_euristico import *
from timer_cspf import *
from graphml2nx_ste import *
from nx2t3d import *
from nx2json import *
from demand_gen_ste import *
from flowbuilder import *
from topologybuilder import *
CONFIDENCE = 5
CTRL = 'ryu'
n_nodi_core = 0
n_nodi_di_bordo = 0
FLOW_CATALOGUE_FILENAME = 'flow_catalogue.json'
VLL_TOKEN_IN_T3D = 'Vll'
DATA_TOKEN_IN_T3D = 'Data'
PW_TOKEN_IN_T3D = 'PW'
def set_weights_on_available_capa(BIGK, nx_multidigraph):
    """Set each link's 'weight' inversely proportional to its residual capacity.

    weight = BIGK / (capacity - allocated); missing 'capacity' defaults to
    BIGK and missing 'allocated' to 0, so an untouched link gets weight 1.
    """
    for _, _, attrs in nx_multidigraph.edges_iter(data=True):
        residual = attrs.get('capacity', BIGK) - attrs.get('allocated', 0)
        attrs['weight'] = float(BIGK) / residual
    return
def multidigraph_from_flow_catalogue (fc_dict):
    """Transform the catalogue of the flows in a nx multidigraph.

    Each catalogue entry (src, dst, {'out': ..., 'in': ...}) contributes one
    src->dst edge for its 'out' half and one dst->src edge for its 'in' half,
    keyed by the flow id, whenever that half carries a 'size'.
    """
    nx_flows = nx.MultiDiGraph()
    for flow_id, (src, dst, flow_dict) in fc_dict.items():
        out_half = flow_dict.get('out', {})
        if 'size' in out_half:
            nx_flows.add_edge(src, dst, flow_id, {'size': out_half['size'], 'path': []})
        in_half = flow_dict.get('in', {})
        if 'size' in in_half:
            nx_flows.add_edge(dst, src, flow_id, {'size': in_half['size'], 'path': []})
    return nx_flows
def prune_graph_by_available_capacity(nx_multidigraph, size, tolerance = False):
    """Prune from the topology all the links that do not have enough available
    capacity to support the traffic relation (size is the bitrate).

    A link is kept when capacity - allocated - epsilon >= size, where epsilon
    is a CONFIDENCE% safety margin of the capacity; links without a 'capacity'
    attribute are never pruned. With tolerance=True nothing is removed.

    Bug fix: edges are now collected first and removed afterwards -- removing
    while iterating edges_iter() corrupts the iteration -- and removal is
    keyed, so the correct parallel edge of the multigraph is deleted instead
    of an arbitrary one between the same endpoints.
    """
    to_remove = []
    for u, v, key, attrs in nx_multidigraph.edges_iter(data=True, keys=True):
        if 'capacity' not in attrs:
            continue
        epsilon = (float(attrs['capacity']) * CONFIDENCE) / 100
        if attrs['capacity'] - attrs.get('allocated', 0) - epsilon >= size:
            continue
        if not tolerance:
            to_remove.append((u, v, key))
    for u, v, key in to_remove:
        nx_multidigraph.remove_edge(u, v, key)
# Records the path computed for a traffic flow on its edge attributes.
def store_path (nx_multidigraph, src, dst, flow_id, path):
    """Attach `path` to the (src, dst, flow_id) edge of the flow multigraph."""
    edge_attrs = nx_multidigraph[src][dst][flow_id]
    edge_attrs['path'] = path
def delete_path (nx_multidigraph, src, dst, flow_id):
    """Reset the stored path of the (src, dst, flow_id) edge to an empty list."""
    edge_attrs = nx_multidigraph[src][dst][flow_id]
    edge_attrs['path'] = []
# Records whether a traffic flow has been allocated or not.
def set_allocated (flow_catalogue, flow_id, direction, allocated = True):
    """Flag the 'in' or 'out' half of a catalogue flow as (un)allocated.

    Any other `direction` value is silently ignored, as in the original
    if-chain.
    """
    if direction in ('out', 'in'):
        flow_catalogue[flow_id][2][direction]['allocated'] = allocated
def calculate_t_s(total_capacity, nx_multidigraph):
    """Calculates T in seconds.

    Kleinrock-style average delay: sum over links of
    allocated / (capacity - allocated), scaled by 1 / total_capacity.
    Returns 0 for a graph with no edges.
    """
    t = 0
    load_sum = 0
    for _, _, attrs in nx_multidigraph.edges_iter(data=True):
        load_sum += float(attrs['allocated']) / (attrs['capacity'] - attrs['allocated'])
        t = (float(1) / total_capacity) * load_sum
    return t
# Calculate T in mseconds
def calculate_t_ms(total_capacity, nx_multidigraph):
    """Average network delay T in milliseconds.

    Bug fix: the original returned `calculate_t_s * 1000`, which multiplies
    the function object itself and raises TypeError at runtime; the function
    must be called with the arguments first.
    """
    return calculate_t_s(total_capacity, nx_multidigraph) * 1000
def calculate_l(total_capacity, nx_multidigraph, S):
    """Store a per-link metric 'l_value' on every edge of the multigraph.

    l = (1/total_capacity) * (capacity*S) / ((capacity-allocated)*S)**2 * 1e7
    NOTE(review): `l` is recomputed (not accumulated) for each edge, so each
    edge gets its own value -- confirm an accumulation was not intended.
    """
    for _, _, attrs in nx_multidigraph.edges_iter(data=True):
        numerator = float(attrs['capacity']) * S
        denominator = (float(attrs['capacity'] - attrs['allocated']) * S) ** 2
        attrs['l_value'] = (float(1) / total_capacity) * (numerator / denominator) * (10 ** 7)
    return
#Print functions
"""
print(nx_topology.nodes()) # Print nodes of the nx_topology
print(nx_topology.edges()) # Print edges of the nx_topology
print(nx_topology.edge[1][3][0]['capacity']) # Print the attribute capacity of the link (1,3)
print(nx_topology[1][3]) # Print the map of the link (1,3)
print(nx_topology.edge[2]) # Print the adjancies of the node 2
"""
def retrieve_flows(controllerRestIP):
"""Retrieves the flows from the vll_pusher.cfg and generates the flow_catalogue
Stefano 2016-03-22: it looks like this method is outdated
"""
global pusher_cfg
flow_catalogue = {}
# 100 Kb/s
ub_static_rate = 100
lb_static_rate = 50
print "*** Read Configuration File"
path = "vll_pusher.cfg"
if os.path.exists(path):
conf = open(path,'r')
pusher_cfg = json.load(conf)
conf.close()
else:
print "No Configuration File Find In %s" % path
sys.exit(-2)
retrieve_port_number_and_mac(controllerRestIP)
i = 0
for vll in pusher_cfg['vlls']:
size = random.uniform(lb_static_rate, ub_static_rate)
flow_catalogue[i] = (vll['lhs_dpid'].replace(":",""), vll['rhs_dpid'].replace(":",""), {'out':{'size': size, 'allocated': False, 'srcPort': vll['lhs_intf'], 'dstPort':vll['rhs_intf'], 'type':'vll'}, 'in':{'size': size, 'allocated': False, 'srcPort': vll['rhs_intf'], 'dstPort':vll['lhs_intf'], 'type':'vll'}})
i = i + 1
for pw in pusher_cfg['pws']:
size = random.uniform(lb_static_rate, ub_static_rate)
flow_catalogue[i] = (pw['lhs_dpid'].replace(":",""), pw['rhs_dpid'].replace(":",""), {'out':{'size': size, 'allocated': False, 'srcPort': pw['lhs_intf'], 'dstPort':pw['rhs_intf'], 'srcMac': pw['lhs_mac'].replace(":",""), 'dstMac': pw['rhs_mac'].replace(":",""), 'type':'pw'}, 'in':{'size': size, 'allocated': False, 'srcPort': pw['rhs_intf'], 'dstPort':pw['lhs_intf'], 'srcMac': pw['rhs_mac'].replace(":",""), 'dstMac': pw['lhs_mac'].replace(":",""), 'type':'pw'}})
i = i + 1
print('\nFlow catalogue:')
print(flow_catalogue)
print '###################################', '\n'
return flow_catalogue
def flow_allocator(ctrl_endpoint):
    """End-to-end allocation driver: pulls the topology from the controller,
    builds the flow catalogue, assigns each flow a path, then pushes the flows.

    ctrl_endpoint: controller REST endpoint (IP:port).
    """
    factory = TopologyBuilderFactory()
    topobuilder = factory.getTopologyBuilder(CTRL, ctrl_endpoint)
    # Retrieves the topology from the CTRL controller and build the networkx topology
    topobuilder.parseJsonToNx()
    flow_catalogue = retrieve_flows(ctrl_endpoint)
    nx_topology = topobuilder.nx_topology
    print('\nnx_multidigraph edges')
    print(list(nx_topology.edges_iter(data=True)))
    print "#############################################################", "\n"
    BIGK = topobuilder.max_capacity
    # Assign the weights
    set_weights_on_available_capa(BIGK, nx_topology)
    # Transforms flow_catalogue in a nx multidigraph
    nx_flows = multidigraph_from_flow_catalogue(flow_catalogue)
    # Assigns the flows
    # NOTE(review): flow_assignment and flow_pusher are not defined in this
    # portion of the file -- confirm they are imported at module level.
    flow_assignment(nx_topology, flow_catalogue, nx_flows, BIGK)
    # Pushes the flows
    flow_pusher(nx_topology, flow_catalogue, nx_flows, ctrl_endpoint)
def extracts_links(nx_topology_new, my_key, my_value):
    """ extracts the links in a nx multidigraph that match the filter
    returns a new nx multidigraph, the one in input is not changed
    """
    output_nx = nx.MultiDiGraph()
    for source, dest, edge_key, attrs in nx_topology_new.edges_iter(data=True, keys=True):
        # keep only links that carry my_key with exactly my_value
        if my_key in attrs and attrs[my_key] == my_value:
            output_nx.add_edge(source, dest, key=edge_key)
            # share the original attribute dict, as the original code did
            output_nx[source][dest][edge_key] = nx_topology_new[source][dest][edge_key]
    return output_nx
def filter_links(nx_topology_new, my_key, my_value):
    """ filter the links in a nx multidigraph, keeping only the ones that match the filter
    operates on the multidigraph given in input
    """
    doomed = []
    for source, dest, edge_key, attrs in nx_topology_new.edges_iter(data=True, keys=True):
        keep = my_key in attrs and attrs[my_key] == my_value
        if not keep:
            doomed.append((source, dest, edge_key))
    # deletion is deferred so the iteration above is never disturbed
    for source, dest, edge_key in doomed:
        #print "DELETED : ", source, dest, edge_key
        del nx_topology_new[source][dest][edge_key]
def serialize(nx_topology_new):
    """Serialize the topology to links.json and nodes.json in the CWD.

    Idiom fix: the explicit close() calls inside the `with` blocks were
    redundant (the context manager already closes the files) and have been
    removed.
    """
    with open('links.json', 'w') as outfile:
        json.dump(nx_topology_new.edges(data=True), outfile, indent=4, sort_keys=True)
    with open('nodes.json', 'w') as outfile:
        json.dump(nx_topology_new.nodes(data=True), outfile, indent=4, sort_keys=True)
def serialize_flow_catalogue(flow_catalogue):
    """Serialize the flow catalogue to FLOW_CATALOGUE_FILENAME (JSON, sorted keys).

    Idiom fix: the explicit close() inside the `with` block was redundant
    (the context manager already closes the file) and has been removed.
    """
    with open(FLOW_CATALOGUE_FILENAME, 'w') as outfile:
        json.dump(flow_catalogue, outfile, indent=4, sort_keys=True)
def print_links(nx_topology_new):
    # Debug helper: dumps the full edge list (plain and JSON) plus summary stats.
    print "link list"
    print(list(nx_topology_new.edges_iter(data=True)))
    print json.dumps(list(nx_topology_new.edges_iter(data=True)))
    print_info (nx_topology_new)
    print "#####################################################"
def print_info(nx_topology_new):
    # Prints (node count, edge count, average out-degree) of the multidigraph.
    print "(nodes, edges, avg_degree) : (", \
        nx_topology_new.number_of_nodes(), ",",\
        nx_topology_new.number_of_edges(), ",",\
        float(nx_topology_new.number_of_edges())/nx_topology_new.number_of_nodes(), \
        ")"
def generate_flow_cata(nx_topology_links, flow_type, outfile_name):
    """ generates the flow catalogue file from a set of nx links
    it does not contact the controller, so no port information are added in the catalogue
    """
    flow_cata = {}
    for source, dest, edge_key, attrs in nx_topology_links.edges_iter(data=True, keys=True):
        flow_id = get_id()
        # both halves start empty: size unknown, not allocated, ports unresolved
        entry = {'id': flow_id,
                 'out': {'path': [], 'size': '', 'allocated': False, 'srcPort': '', 'dstPort': '', 'type': flow_type},
                 'in': {'path': [], 'size': '', 'allocated': False, 'srcPort': '', 'dstPort': '', 'type': flow_type}}
        flow_cata[flow_id] = (source, dest, entry)
    with open(outfile_name, 'w') as outfile:
        json.dump(flow_cata, outfile, indent=4, sort_keys=True)
def generate_flow_cata_with_ports(controller_ip_port):
    """ generates the flow catalogue file from the vll_pusher.cfg
    it contacts the controller, so that port information are added in the catalogue
    """
    factory = FlowBuilderFactory()
    builder = factory.getFlowBuilder("from_file", controller_ip_port)
    # parses the configuration into the builder's internal flow catalogue
    builder.parseJsonToFC()
    #builder.cataloguePrint()
    # writes the catalogue to disk (presumably flow_catalogue.json -- TODO confirm)
    builder.serialize()
def ctrl_ryu_2_nx(controller_ip_port):
    """Retrieve the topology from a Ryu controller and return it as a nx multidigraph."""
    factory = TopologyBuilderFactory()
    builder = factory.getTopologyBuilder("ryu", controller_ip_port)
    builder.parseJsonToNx()
    return builder.nx_topology
def retrieve_link_from_id(nx_multidigraph, lhs, rhs, flow_id):
    """Return the attribute dict of the first parallel (lhs, rhs) edge whose
    'flows' list contains flow_id; None when no such edge exists."""
    for parallel_key in nx_multidigraph[lhs][rhs]:
        attrs = nx_multidigraph[lhs][rhs][parallel_key]
        if flow_id in attrs['flows']:
            return attrs
def run_command(args_in):
    """Main pipeline: import a topology, optionally mark edge nodes and
    generate traffic demands, then export/convert the result.

    args_in: the argparse namespace produced by parse_cmd_line().
    """
    my_seed = int(args_in.random_seed)
    print "SEED : ", my_seed
    random.seed(my_seed) #reset random seed to have repeatable output
    np.random.seed(my_seed) #reset random seed for numpy library (used in geometric distribution)
    nx_topology_new = nx.MultiDiGraph()
    ##########################################
    #import topology phase
    ##########################################
    recognized_file_type_in = False
    if args_in.file_type_in=='graphml':
        recognized_file_type_in = True
        #print inspect.getfile(parse_graphml)
        parse_graphml(nx_topology_new, args_in.file, defa_node_type="OSHI-CR", defa_link_type="Data", allow_multilink=False)
        print "imported a topology from graphml file"
        print_info(nx_topology_new)
    if args_in.file_type_in=='nx':
        # rebuild the multidigraph from the nodes.json / links.json pair in the CWD
        recognized_file_type_in = True
        with open("nodes.json") as data_file:
            nodes_file = json.load(data_file)
        #print json.dumps(nodes_file)
        for node_couple in nodes_file:
            #print node_couple[0]
            nx_topology_new.add_node(node_couple[0], attr_dict=node_couple[1])
        with open("links.json") as data_file:
            links_file = json.load(data_file)
        #print json.dumps(links_file)
        for link_triple in links_file:
            #print link_triple[2]['id']
            # the link 'id' attribute is reused as the multigraph edge key
            nx_topology_new.add_edge(link_triple[0], link_triple[1], key=link_triple[2]['id'])
            for key_dict in link_triple[2]:
                #print key_dict
                nx_topology_new[link_triple[0]][link_triple[1]][link_triple[2]['id']][key_dict]=link_triple[2][key_dict]
        print "imported a topology from nodes.json and links.json"
        print_info(nx_topology_new)
    if args_in.file_type_in=='t3d':
        recognized_file_type_in = True
        t3d_json_2_nx(nx_topology_new, args_in.file)
        print "imported a topology t3d file"
        print_info(nx_topology_new)
    if args_in.file_type_in=='ctrl_ryu':
        recognized_file_type_in = True
        nx_topology_new = ctrl_ryu_2_nx(args_in.controllerRestIp)
    if not recognized_file_type_in:
        print "--in argument not recognized"
        sys.exit(-1)
    ##########################################
    # selecting source/destination nodes
    ##########################################
    if args_in.select_edge_nodes:
        #### randomly adds the key value pair 'is_edge'=True to a subset of nodes (needed to generate traffic demand)
        (access_nodes, total_nodes) = add_nodes_marks(nx_topology_new, p_mark=float(args_in.access_prob), key_to_add='is_edge')
        print "number of access nodes ", access_nodes, " (", float(access_nodes)/total_nodes*100, "% )"
        #del_nodes_marks(nx_topology_new, "is_edge")
        #uncomment this line if you want to output the links.json and node.json
        #serialize(nx_topology_new)
    ##########################################
    #traffic demand generation phase
    ##########################################
    if args_in.generate_demands:
        #flow_catalogue = build_flows(nx_topology_new, traffic_rel_probability=0.2, avg_num_flows = 4, max_num_flows = 10, link_capa_to_traff_rel_ratio=10)
        flow_catalogue = build_flows(nx_topology_new,
            traffic_rel_probability=float(args_in.t_rel_prob),
            avg_num_flows = float(args_in.mean_num_flows),
            max_num_flows = int(args_in.max_num_flows),
            link_capa_to_traff_rel_ratio=float(args_in.l_to_t_rel_prob)
            )
        serialize_flow_catalogue(flow_catalogue)
        print "generated traffic demands"
        # NOTE(review): `dict` shadows the builtin, and `access_nodes` is only
        # defined when --select_edge_nodes was given -- --generate_demands
        # without it raises NameError here; confirm the intended coupling.
        dict=get_flow_catalogue_stats(flow_catalogue, access_nodes)
        print dict
    #print_links(nx_topology_new)
    ##########################################
    #topology output/convertion phase
    ##########################################
    if args_in.file_type_out=='t3d':
        nx_2_t3d_json(nx_topology_new, 'out.t3d')
        print "converted topology in .t3d format"
    if args_in.file_type_out=='nx':
        if args_in.filters_only_data_link:
            print "filtered only links with view = Data"
            filter_links(nx_topology_new, 'view', DATA_TOKEN_IN_T3D)
        #output the links.json and node.json
        serialize(nx_topology_new)
        print "serialized topology in links.json and node.json"
        if args_in.generate_vll_pw_flow_cata:
            #only vll catalogue is considered
            vll_list = extracts_links(nx_topology_new, 'view', VLL_TOKEN_IN_T3D)
            # .size() is the networkx edge count of the extracted subgraph
            if vll_list.size() > 0:
                generate_flow_cata (vll_list, flow_type = 'vll', outfile_name = 'flow_cata_vll.json')
                print "serialized vll_list in flow_cata_vll.json"
            pw_list = extracts_links(nx_topology_new, 'view', PW_TOKEN_IN_T3D)
            if pw_list.size() > 0:
                generate_flow_cata (pw_list, flow_type = 'pw', outfile_name = 'flow_cata_pw.json')
                print "serialized pw_list in flow_cata_pw.json"
    if args_in.generate_flow_cata_from_vll_pusher_cfg:
        generate_flow_cata_with_ports(args_in.controllerRestIp)
        print "serialized flow catalogue in flow_catalogue.json"
    #t3d_json_2_nx(nx_topology_new, 'out.t3d')
    #add_edge_nodes (nx_topology_new)
    #print_links(nx_topology_new)
    #flow_allocator(args.controllerRestIp)
# 1) transforms a topology from graphml to topology3d
# python parse_transform_generate.py --f graphml/Colt_2010_08-153N.graphml --in graphml --out t3d
# 2) use a graphml topology and generates demands (selecting a subset of nodes as edge nodes), outputs links and nodes (nx model) and flow catalogue
# python parse_transform_generate.py --f graphml/Colt_2010_08-153N.graphml --in graphml --out nx --select_edge_nodes --generate_demands --access_node_prob 0.4 --t_rel_prob 0.2 --mean_num_flows 4 --max_num_flows 10 --link__to_t_rel_ratio 10
# 3.1) use a topology3d topology, outputs links and nodes (nx model) and two flow catalogues, one for vll and one for pseudo wires
# python parse_transform_generate.py --f t3d/small-topology.t3d --in t3d --out nx --filters_only_data_link --generate_vll_pw_flow_cata
# 3.2) same as above, with a different topology3d used in input
# python parse_transform_generate.py --f t3d/small-topo2-4-vll.t3d --in t3d --out nx --filters_only_data_link --generate_vll_pw_flow_cata
# 4) use a topology3d topology and generate flow catalogue from vll_pusher.cfg generated by Dreamer-Mininet-Extensions
# python parse_transform_generate.py --f t3d/small-topo2-4-vll.t3d --in t3d --out nx --filters_only_data_link --generate_flow_cata_from_vll_pusher_cfg --controller 10.255.245.1:8080
# 5) use a topology from the ryu controller and generate flow catalogue from vll_pusher.cfg generated by Dreamer-Mininet-Extensions
# python parse_transform_generate.py --in ctrl_ryu --out nx --generate_flow_cata_from_vll_pusher_cfg --controller 10.255.245.1:8080
def parse_cmd_line():
    """Parse the CLI arguments and return the argparse namespace.

    Prints the help and exits with status 1 when no argument is given.
    Fix: corrected the typo in the --seed help text ("generato" ->
    "generator").
    """
    parser = argparse.ArgumentParser(description="Parses and transforms topologies between different formats - Generates traffic demands")
    parser.add_argument('--controller', dest='controllerRestIp', action='store', default='', help='used to connect to the controller and obtain the port numbers --controller IP:RESTport, e.g., localhost:8080 or A.B.C.D:8080')
    parser.add_argument('--f', dest='file', action='store', help='input file to parse')
    parser.add_argument('--in', dest='file_type_in', action='store', default='graphml', help='type of input file or controller, default = graphml, options = t3d, nx, ctrl_ryu')
    parser.add_argument('--out', dest='file_type_out', action='store', default='t3d', help='type of output file, default = t3d, options = nx')
    parser.add_argument('--generate_demands', dest='generate_demands', action='store_true', help='used to generate the traffic demands')
    parser.add_argument('--select_edge_nodes', dest='select_edge_nodes', action='store_true', help='marks edge nodes (needed to generate the traffic demands)')
    parser.add_argument('--filters_only_data_link', dest='filters_only_data_link', action='store_true', help='outputs only links with view = Data')
    parser.add_argument('--generate_vll_pw_flow_cata', dest='generate_vll_pw_flow_cata', action='store_true', help='generates flowcata of vll and pw from t3d file')
    parser.add_argument('--generate_flow_cata_from_vll_pusher_cfg', dest='generate_flow_cata_from_vll_pusher_cfg', action='store_true', help='generates flow catalogue from vll_pusher.cfg')
    parser.add_argument('--access_node_prob', dest='access_prob', action='store', default='1', help='probability of a node to be an access node, default = 1')
    parser.add_argument('--t_rel_prob', dest='t_rel_prob', action='store', default='1', help='probability of a traffic relation between nodes, default = 1')
    parser.add_argument('--mean_num_flows', dest='mean_num_flows', action='store', default='1', help='average number of flows in a traffic relation, default = 1')
    parser.add_argument('--max_num_flows', dest='max_num_flows', action='store', default='1', help='maximum number of flows in a traffic relation, default = 1')
    parser.add_argument('--link__to_t_rel_ratio', dest='l_to_t_rel_prob', action='store', default='1', help='ratio between avg link capa and sum of flow rates in a traffic relation, default = 1')
    parser.add_argument('--seed', dest='random_seed', action='store', default='69', help='seed for the random number generator, default = 69')
    args = parser.parse_args()
    if len(sys.argv)==1:
        parser.print_help()
        sys.exit(1)
    return args
if __name__ == '__main__':
    # Entry point: parse the CLI arguments and run the requested pipeline.
    args = parse_cmd_line()
    run_command(args)
import os
def single_execution():
    """Run one traffic-demand generation and archive its outputs.

    Reads the module-level globals (topology, access_node_prob, t_rel_prob,
    mean_num_flows, max_num_flows, link__to_t_rel_ratio, my_seed, folder,
    file_in_name, file_in_type) set by the driver loop below; shells out to
    ste-test.py and copies the resulting flow_catalogue.json into `folder`.
    """
    base_filename = "flow_cata_topo_%s_%s_%s_%s_%s_%s_seed%s" \
        %(topology, access_node_prob, t_rel_prob, mean_num_flows, max_num_flows, link__to_t_rel_ratio, my_seed)
    # command = "python ste-test.py %s --in %s --out t3d --access_node_prob %s --t_rel_prob %s --mean_num_flows %s --max_num_flows %s --link__to_t_rel_ratio %s --seed %s > %s/%s.info" \
    command = "python ste-test.py %s --in %s --out t3d --access_node_prob %s --t_rel_prob %s --mean_num_flows %s --max_num_flows %s --link__to_t_rel_ratio %s --seed %s > %s/%s.info" \
        %(file_in_name, file_in_type, access_node_prob, t_rel_prob, mean_num_flows, max_num_flows, link__to_t_rel_ratio, my_seed, folder, base_filename)
    print command
    os.system (command)
    # archive the generated catalogue under a parameter-encoding name
    command2 = "cp flow_catalogue.json %s/%s.json" %(folder, base_filename)
    print command2
    os.system (command2)
folder = "flow_catalogues"
num_nodes = 153
connection = 1
seed = 69
command0 = "python topologybuilder.py --model barabasi-albert --nodes %s --connection %s --seed %s" \
%(num_nodes, connection,seed)
print command0
os.system (command0)
base_topo_filename = "BA_%s_%s_%s" %(num_nodes, connection,seed)
command01 = "cp nodes.json %s/%s.nodes.json" %(folder, base_topo_filename)
print command01
os.system (command01)
command01 = "cp links.json %s/%s.links.json" %(folder, base_topo_filename)
print command01
os.system (command01)
#file_in_type = "graphml"
#file_in_name = "--f graphml/Colt_2010_08-153N.graphml"
file_in_type = "nx_json"
file_in_name = ""
#topology = "colt153"
topology = "BA_%s_%s_69" %(num_nodes,connection)
my_seed = 69
access_node_prob = 0.4
t_rel_prob = 0.2
mean_num_flows = 4
max_num_flows = 10
link__to_t_rel_ratio = 10
for i in [10, 20, 40, 80]: #first set
#for i in [60, 70, 80]: #second set
link__to_t_rel_ratio = i
#for j in [69, 70, 71]:
for j in [69]:
my_seed = j
# single_execution()
access_node_prob = 0.8
#for i in [10, 20, 40, 80, 160, 320]: #first set
for i in [40, 60, 80, 120, 160, 200, 240, 280, 320, 360, 400, 520, 640]: #second set
link__to_t_rel_ratio = i
#for j in [69, 70, 71]:
for j in [69]:
my_seed = j
single_execution()
# python multi-traffic-demand-generator.py
# the generated topology and flows are available in flow_catalogue subfolder
# python ste-test.py --f graphml/Colt_2010_08-153N.graphml --in graphml --out t3d --access_node_prob 0.4 --t_rel_prob 0.2 --mean_num_flows 4 --max_num_flows 10 --link__to_t_rel_ratio 10 > flow_catalogues/blabla.info
<file_sep>/parsers-generators/topologybuilder.py
#!/usr/bin/python
import os
import sys
from pprint import pprint
import argparse
import json
import networkx as nx
import siphash
import matplotlib.pyplot as plt
class TopologyBuilderFactory:
    """Factory returning the topology builder matching a source name.

    Supported sources: 'ryu', 'floodlight', 'erdos-renyi', 'waxman',
    'barabasi-albert'; any other name aborts the process with exit code -1.
    """
    def getTopologyBuilder(self, source, data):
        """Return a builder for `source`, forwarding `data` to its constructor.

        NOTE(review): RyuTopologyBuilder and FloodlightTopologyBuilder are not
        defined in this portion of the file -- confirm they exist elsewhere.
        """
        if source == "ryu":
            return RyuTopologyBuilder(data)
        elif source == "floodlight":
            return FloodlightTopologyBuilder(data)
        elif source == "erdos-renyi":
            return ErdosRenyiTopologyBuilder(data)
        elif source == "waxman":
            return WaxmanTopologyBuilder(data)
        elif source == "barabasi-albert":
            return BarabasiAlbertTopologyBuilder(data)
        else:
            print "Builder %s Not Supported...Exit" %(source)
            sys.exit(-1)
class RandomTopologyBuilder:
    """Common base for the random topology generators.

    Holds the generated nx multidigraph, a SipHash instance used by the
    subclasses to derive stable link ids, and JSON serialization helpers.
    """
    def __init__(self):
        # nx_topology is filled by the subclass generate() methods
        self.nx_topology = None
        self.max_capacity = 0.0
        # NOTE(review): '<KEY>' looks like an anonymized placeholder --
        # SipHash_2_4 requires a 16-byte key; confirm the real value.
        key = '<KEY>'
        self.sip = siphash.SipHash_2_4(key)
    def nx_topoPrint(self):
        """Print the edge list and draw the topology with matplotlib.

        NOTE(review): when nx_topology is None, draw_random(None) below will
        still be called and fail -- confirm whether an early return is wanted.
        """
        if self.nx_topology is None:
            print []
        else:
            print self.nx_topology.edges(data=True)
        nx.draw_random(self.nx_topology)
        plt.show()
    def is_connected(self):
        """Return True when the generated digraph is strongly connected."""
        return nx.is_strongly_connected(self.nx_topology)
    def serialize(self):
        """Write the topology to links.json and nodes.json in the CWD."""
        with open('links.json', 'w') as outfile:
            json.dump(self.nx_topology.edges(data=True), outfile, indent=4, sort_keys=True)
            outfile.close()
        with open('nodes.json', 'w') as outfile:
            json.dump(self.nx_topology.nodes(data=True), outfile, indent=4, sort_keys=True)
            outfile.close()
class ErdosRenyiTopologyBuilder(RandomTopologyBuilder):
    # n : The number of nodes.
    # p : Probability for edge creation.
    # seed : Seed for random number generator (default=None).
    # directed : If True return a directed graph
    DEFAULT_SPEED = 10000  # default link capacity assigned to every edge

    def __init__(self, data):
        """data is a sequence: (n, p, seed, directed)."""
        RandomTopologyBuilder.__init__(self)
        self.nodes = data[0]
        self.probability = data[1]
        self.seed = data[2]
        self.directed = data[3]

    def generate(self):
        """Generate the topology into self.nx_topology (1-based string node ids,
        both directions of every undirected edge added explicitly).

        Bug fix: self.seed was stored but never forwarded to the generator,
        so runs with the same seed were not reproducible.
        """
        erdos = nx.erdos_renyi_graph(self.nodes, self.probability, seed=self.seed)
        # NOTE(review): self.directed is still unused; both directions are
        # added explicitly below -- confirm before forwarding it too.
        self.nx_topology = nx.MultiDiGraph()
        self.nx_topology.clear()
        index = 0
        nodes = []
        for node in erdos.nodes():
            # node ids are shifted to 1-based and stringified
            nodes.append(str(node+1))
        self.nx_topology.add_nodes_from(nodes)
        for (n1, n2) in erdos.edges():
            n1 = n1 + 1
            n2 = n2 + 1
            # a SipHash of the running edge index provides a stable link id
            self.sip.update(str(index))
            id_ = str(self.sip.hash())
            self.nx_topology.add_edge(str(n1), str(n2), capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            index = index + 1
            self.sip.update(str(index))
            id_ = str(self.sip.hash())
            self.nx_topology.add_edge(str(n2), str(n1), capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            index = index + 1
class WaxmanTopologyBuilder(RandomTopologyBuilder):
    # n : Number of nodes
    # alpha: Model parameter
    # beta: Model parameter
    # L : Maximum distance between nodes. If not specified the actual distance is calculated.
    # domain : Domain size (xmin, ymin, xmax, ymax)
    DEFAULT_SPEED = 10000  # default link capacity assigned to every edge
    def __init__(self, data):
        # data is a sequence: (n, alpha, beta, L, domain)
        RandomTopologyBuilder.__init__(self)
        self.nodes = data[0]
        self.alpha = data[1]
        self.beta = data[2]
        self.L = data[3]
        self.domain = data[4]
    def generate(self):
        """Generate the topology into self.nx_topology (1-based string node ids,
        both directions of every undirected edge added explicitly).

        NOTE(review): positional waxman_graph(n, alpha, beta, L, domain) matches
        the networkx 1.x signature -- confirm before upgrading networkx.
        """
        waxman = nx.waxman_graph(self.nodes, self.alpha, self.beta, self.L, self.domain)
        self.nx_topology = nx.MultiDiGraph()
        self.nx_topology.clear()
        index = 0
        nodes = []
        for node in waxman.nodes():
            #SS nodes.append(node+1)
            nodes.append(str(node+1))
        self.nx_topology.add_nodes_from(nodes)
        for (n1, n2) in waxman.edges():
            n1 = n1 + 1
            n2 = n2 + 1
            # a SipHash of the running edge index provides a stable link id
            self.sip.update(str(index))
            id_ = str(self.sip.hash())
            #SSself.nx_topology.add_edge(n1, n2, capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            self.nx_topology.add_edge(str(n1), str(n2), capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            index = index + 1
            self.sip.update(str(index))
            id_ = str(self.sip.hash())
            #self.nx_topology.add_edge(n2, n1, capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            self.nx_topology.add_edge(str(n2), str(n1), capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=id_)
            index = index + 1
class BarabasiAlbertTopologyBuilder(RandomTopologyBuilder):
    """Random topology based on the Barabasi-Albert preferential attachment model.

    Constructor data is a list:
      data[0] - number of nodes
      data[1] - m, number of edges to attach from a new node to existing nodes
      data[2] - seed for the random number generator (default=None)
    """

    # Capacity assigned to every generated link
    DEFAULT_SPEED = 10000

    def __init__(self, data):
        RandomTopologyBuilder.__init__(self)
        self.nodes = data[0]
        self.m = data[1]
        self.seed = data[2]

    def generate(self):
        """Builds self.nx_topology: a MultiDiGraph with two opposite directed
        edges for every undirected Barabasi-Albert edge."""
        barabasi_albert = nx.barabasi_albert_graph(self.nodes, self.m, self.seed)
        self.nx_topology = nx.MultiDiGraph()
        self.nx_topology.clear()
        # Node labels are 1-based strings
        self.nx_topology.add_nodes_from(str(node + 1) for node in barabasi_albert.nodes())
        index = 0
        for (left, right) in barabasi_albert.edges():
            src, dst = str(left + 1), str(right + 1)
            for (tail, head) in ((src, dst), (dst, src)):
                # Unique siphash-derived id per directed edge
                self.sip.update(str(index))
                edge_id = str(self.sip.hash())
                self.nx_topology.add_edge(tail, head, capacity=self.DEFAULT_SPEED, allocated=0.0, src_port="", dst_port="", src_port_no="", dst_port_no="", src_mac="", dst_mac="", flows=[], id=edge_id)
                index += 1
class CtrlTopologyBuilder:
"""docstring for CoTRLTopoTo"""
def __init__(self, controller):
self.IpPort = controller
print "initialized TopologyBuilder with address : ", controller
self.topology = {}
self.nodes = []
self.ports = {}
self.nx_topology = None
self.max_capacity = 0.0
key = '<KEY>'
self.sip = siphash.SipHash_2_4(key)
def ctrl_topoPrint(self):
print json.dumps(self.topology, sort_keys=True, indent=4)
def nx_topoPrint(self):
if self.nx_topology is None:
print []
else:
print self.nx_topology.edges(data=True)
def parseJsonToNx(self):
raise NotImplementedError("Abstract Method")
def serialize(self):
raise NotImplementedError("Abstract Method")
# XXX Out of date
class FloodlightTopologyBuilder(CtrlTopologyBuilder):
    """Topology builder for the Floodlight controller REST API."""

    def __init__(self, controller):
        CtrlTopologyBuilder.__init__(self, controller)

    def parseJsonToNx(self):
        """Queries /wm/topology/links/json and fills self.nx_topology.
        Does nothing when the controller returns no data."""
        command = "curl --max-time 30 -s http://"+self.IpPort+"/wm/topology/links/json"
        result = os.popen(command).read()
        if result == "":
            return
        self.topology = json.loads(result)
        self.nx_topology = nx.MultiDiGraph()
        self.nx_topology.clear()
        for link in self.topology:
            # Switch dpids are stored without ':' separators
            self.nx_topology.add_edge(link['src-switch'].replace(":", ""),
                                      link['dst-switch'].replace(":", ""),
                                      src_port=str(link['src-port']),
                                      dst_port=str(link['dst-port']))
class RyuTopologyBuilder(CtrlTopologyBuilder):
    # Topology builder for the Ryu controller: queries its REST API for the
    # switch list, per-switch port descriptions and inter-switch links, then
    # builds a MultiDiGraph with one directed edge per reported link.
    def __init__(self, controller):
        CtrlTopologyBuilder.__init__(self, controller)
    def parseJsonToNx(self):
        # Step 1: retrieve the datapath ids of all switches
        command = "curl --max-time 30 -s http://"+self.IpPort+"/stats/switches"
        result = os.popen(command).read()
        if result != "":
            try:
                switches = json.loads(result)
            except ValueError:
                print 'Decoding JSON has failed'
                print "Error: something does not work in getting info from ryu controller"
                sys.exit(-2)
            # Step 2: cache each switch's port descriptions (name, speed, ...)
            # keyed by decimal dpid string in self.ports
            for switch in switches:
                command = "curl --max-time 30 -s http://"+self.IpPort+"/stats/portdesc/"+str(switch)
                result = os.popen(command).read()
                if result != "":
                    try:
                        switch_ports = json.loads(result)
                    except ValueError:
                        print 'Decoding JSON has failed'
                        print "Error: something does not work in getting info from ryu controller"
                        sys.exit(-2)
                    self.ports[str(switch)] = switch_ports[str(switch)]
            # Step 3: retrieve the inter-switch links and build the graph
            command = "curl --max-time 30 -s http://"+self.IpPort+"/v1.0/topology/links"
            result = os.popen(command).read()
            if result != "":
                try:
                    self.topology = json.loads(result)
                except ValueError:
                    print 'Decoding JSON has failed'
                    print "Error: something does not work in getting info from ryu controller"
                    sys.exit(-2)
                self.nx_topology = nx.MultiDiGraph()
                self.nx_topology.clear()
                index = 0
                for link in self.topology:
                    src = link['src']['dpid']
                    dst = link['dst']['dpid']
                    src_port = link['src']['name']
                    dst_port = link['dst']['name']
                    src_port_no = link['src']['port_no']
                    dst_port_no = link['dst']['port_no']
                    src_mac = link['src']['hw_addr'].replace(":","")
                    dst_mac = link['dst']['hw_addr'].replace(":","")
                    # Look up the current speed of the source port; the dpid
                    # (hex string) is converted to the decimal key of self.ports
                    src_capacity = 0.0
                    src_ports = self.ports[str(int(src,16))]
                    for port in src_ports:
                        if port['name'] == src_port:
                            src_capacity = int(port['curr_speed'])/1000
                            break
                    if src_capacity == 0.0:
                        print "Error - SRC Capacity cannot be 0.0"
                        sys.exit(-1)
                    # Same lookup for the destination port
                    dst_capacity = 0.0
                    dst_ports = self.ports[str(int(dst,16))]
                    for port in dst_ports:
                        if port['name'] == dst_port:
                            dst_capacity = int(port['curr_speed'])/1000
                            break
                    if dst_capacity == 0.0:
                        print "Error - DST Capacity cannot be 0.0"
                        sys.exit(-1)
                    # Link capacity is the minimum of the two port speeds
                    if src_capacity <= dst_capacity:
                        capacity = src_capacity
                    else:
                        capacity = dst_capacity
                    # Track the maximum capacity seen across all links
                    if capacity >= self.max_capacity:
                        self.max_capacity = capacity
                    # Unique link id derived from the running index via siphash
                    self.sip.update(str(index))
                    id_ = str(self.sip.hash())
                    self.nx_topology.add_edge(src, dst, capacity=capacity, allocated=0.0, src_port=src_port, dst_port=dst_port, src_port_no=src_port_no, dst_port_no=dst_port_no, src_mac=src_mac, dst_mac=dst_mac, flows=[], id=id_)
                    index = index + 1
            else:
                print "Error: something does not work in getting info from ryu controller"
                sys.exit(-2)
    def serialize(self):
        # Dump edges and nodes (with attributes) to links.json / nodes.json
        with open('links.json', 'w') as outfile:
            json.dump(self.nx_topology.edges(data=True), outfile, indent=4, sort_keys=True)
            outfile.close()
        with open('nodes.json', 'w') as outfile:
            json.dump(self.nx_topology.nodes(data=True), outfile, indent=4, sort_keys=True)
            outfile.close()
def run_command(args_in):
#my_seed = None
if args_in.random_seed == None:
random_seed = None
else:
random_seed = int(args_in.random_seed)
print "SEED : ", random_seed
num_nodes = int(args_in.num_nodes)
connections = int(args_in.connections)
#type_builder = "erdos-renyi"
#data = [num_nodes, 0.05, random_seed, False]
#type_builder = "waxman"
#data = [num_nodes, 0.4, 0.1, 1000, (-90, -180, 90, 180)]
#type_builder = "barabasi-albert"
data = [num_nodes, connections, random_seed]
factory = TopologyBuilderFactory()
builder = factory.getTopologyBuilder(args_in.type_builder, data)
builder.generate()
while not builder.is_connected():
print "Topology is not strongly connected"
builder.generate()
#builder.nx_topoPrint()
print "Generated a connected topology (saved in nodes.json and links.json)"
builder.serialize()
def parse_cmd_line():
    """Parses the command line options of the topology generator."""
    parser = argparse.ArgumentParser(description="Generates topology according to different models (command line parameters have been linked only for BarabasiAlbert)")
    # (flag, dest, default, help) for every supported option
    options = [
        ('--model', 'type_builder', 'barabasi-albert', 'model type, default = barabasi-albert, options = erdos-renyi waxman'),
        ('--nodes', 'num_nodes', '100', 'number of nodes, default = 100'),
        ('--connections', 'connections', '1', 'number of connections parameter for barabasi-albert model, default = 1'),
        ('--seed', 'random_seed', None, 'seed for the random number generator, default = None'),
    ]
    for flag, dest, default, help_text in options:
        parser.add_argument(flag, dest=dest, action='store', default=default, help=help_text)
    return parser.parse_args()
if __name__ == '__main__':
    # Entry point: parse CLI options, then generate and serialize a topology
    args = parse_cmd_line()
    run_command(args)
# Usage examples:
#python topologybuilder.py --model barabasi-albert --nodes 153 --connection 1 --seed 69
#python topologybuilder.py --model waxman --nodes 153
#python topologybuilder.py --model erdos-renyi --nodes 153 --seed 69
#!/usr/bin/python
import argparse
import sys
import os
import json
import time
# Parsed pusher configuration, filled in by read_conf_file()
pusher_cfg = {}
# OpenFlow table ids used by the rules below: tableIP matches on
# in_port/eth_type (ingress rules), tableSBP matches on MPLS labels
tableIP = 0
tableSBP = 1
def read_conf_file(path):
global pusher_cfg
print "*** Read Configuration File For SR Pusher"
if os.path.exists(path):
conf = open(path,'r')
pusher_cfg = json.load(conf)
conf.close()
else:
print "No Configuration File Find In %s" % path
sys.exit(-2)
print "*** PUSHER_CFG", json.dumps(pusher_cfg, sort_keys=True, indent=4)
def get_vll_label_from_dpid(dpid):
    """Derives the SR-VLL MPLS label for a switch from its DPID.

    The dpid (hex string, ':' separators allowed) is stripped of colons; its
    last 8 hex digits are the loopback id, whose low 16 bits are combined
    with the VLL flag bit (0x080000) to form the label.
    """
    LOOPBACK_MASK = 0x0FFFF
    VLL_FLAG = 0x080000
    loopback_digits = dpid.replace(":", "")[8:]
    loopback = int(loopback_digits, 16)
    return (loopback & LOOPBACK_MASK) | VLL_FLAG
# Utility function for the vlls persisentce
def store_vll(name, dpid, table):
    """Appends one created vll record (name, dpid, timestamp, table id) to
    the local ./sr_vlls.json database, one JSON object per line."""
    record = {'name': name, 'Dpid': dpid, 'datetime': time.asctime(), 'table_id': table}
    vllsDb = open('./sr_vlls.json', 'a+')
    vllsDb.write(json.dumps(record) + "\n")
    vllsDb.close()
def add_command(args):
    """Installs the SR-VLL OpenFlow rules described by the configuration
    file onto the switches through the controller's REST API, skipping vlls
    that are already recorded in ./sr_vlls.json or not fully allocated."""
    print "*** Add Vlls From Configuration File"
    print "*** Read Previous Vlls Inserted"
    # Load the local database of already installed vlls (one JSON per line)
    if os.path.exists('./sr_vlls.json'):
        vllsDb = open('./sr_vlls.json','r')
        vlllines = vllsDb.readlines()
        vllsDb.close()
    else:
        vlllines={}
    read_conf_file(args.path)
    controllerRestIp = args.controllerRestIp
    # Last 3 bits identify the SR-VLL TC
    # 0x40000 -> 010|0 0000 0000 0000 0000
    default_label_value = 262144
    # 0x5FFFF -> 010|1 1111 1111 1111 1111
    max_label_value = 393215
    # Next free label per egress switch (keyed by last hop of each path)
    sw_port_label = {}
    for key, vll in pusher_cfg.iteritems():
        # vll = [src switch, dst switch, {out/in flow descriptions, id}]
        srcSW = vll[0]
        dstSW = vll[1]
        srcPo = vll[2]['out']['srcPort']
        dstPo = vll[2]['out']['dstPort']
        out_ = vll[2]['out']
        in_ = vll[2]['in']
        id_ = vll[2]['id']
        out_path = out_['path']
        in_path = in_['path']
        print "out_path", out_path
        print "in_path", in_path
        vllExists = False
        # if the vll exists in the vllDb, we don't insert the flow
        for line in vlllines:
            data = json.loads(line)
            if data['name']==(id_):
                print "Vll %s exists already Skip" % id_
                vllExists = True
                break
        if vllExists == True:
            continue
        # Both directions must be allocated before installing rules
        if not out_['allocated'] or not in_['allocated']:
            print "Vll %s not completely allocated Skip" % id_
            continue
        # Allocate one fresh label per direction on the egress switch
        # (last element of each path); skip if the label space is exhausted
        temp_sw_port_label = {}
        value = sw_port_label.get(out_path[len(out_path)-1], default_label_value)
        if value > max_label_value:
            print "(F) Reached MAX_LABEL_VALUE For Vll %s - Skipping" % id_
            continue
        temp_sw_port_label[out_path[len(out_path)-1]] = int(value)
        value = value + 1
        sw_port_label[out_path[len(out_path)-1]] = value
        value = sw_port_label.get(in_path[len(in_path)-1], default_label_value)
        if value > max_label_value:
            print "(R) Reached MAX_LABEL_VALUE For Vll %s - Skipping" % id_
            continue
        temp_sw_port_label[in_path[len(in_path)-1]] = int(value)
        value = value + 1
        sw_port_label[in_path[len(in_path)-1]] = value
        if srcSW == dstSW:
            # Same-switch vll: a plain port-to-port crossconnect suffices
            # Forward's Rule
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\"}, \"actions\":[{\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableIP, int(srcPo, 16), int(dstPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            # Reverse Forward's Rule
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\"}, \"actions\":[{\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableIP, int(dstPo, 16), int(srcPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            store_vll(id_, srcSW, tableIP)
        else:
            #Forward/in
            # Build the MPLS label stack: the per-vll label first, then one
            # segment label per hop of the path (IP and ARP variants)
            push_ip = "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34887", temp_sw_port_label[out_path[len(out_path)-1]])
            push_arp = "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34888", temp_sw_port_label[out_path[len(out_path)-1]])
            i = len(out_path)-1
            while i >= 0:
                label = get_vll_label_from_dpid(out_path[len(out_path)-1])
                push_ip = push_ip + "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34887", label)
                push_arp = push_arp + "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34888", label)
                i = i-1
            print "*** Install Ingress Rules (FW) - LHS"
            # Ingress Rule For IP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\", \"eth_type\":\"%s\"}, \"actions\":[%s {\"type\":\"GOTO_TABLE\", \"table_id\":%d}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableIP, int(srcPo, 16), "2048", push_ip, tableSBP, controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            # Ingress Rule For ARP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\", \"eth_type\":\"%s\"}, \"actions\":[%s {\"type\":\"GOTO_TABLE\", \"table_id\":%d}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableIP, int(srcPo, 16), "2054", push_arp, tableSBP, controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            store_vll(id_, srcSW, tableIP)
            print "Install Egress Rules (RV) - LHS"
            # Egress: pop the vll label (bottom of stack) and output to the
            # access port
            # Rule For IP
            labelrv1 = temp_sw_port_label[in_path[len(in_path)-1]]
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"eth_type\":\"%s\", \"mpls_label\":\"%s\", \"mpls_bos\":\"1\"}, \"actions\":[{\"type\":\"POP_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableSBP, "34887", labelrv1, "2048", int(srcPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            # Rule For ARP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"eth_type\":\"%s\", \"mpls_label\":\"%s\", \"mpls_bos\":\"1\"}, \"actions\":[{\"type\":\"POP_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(srcSW, 16), id_, tableSBP, "34888", labelrv1, "2054", int(srcPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            store_vll(id_, srcSW, tableSBP)
            #Reverse/out
            # Same construction for the reverse direction, on the RHS switch
            push_ip = "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34887", temp_sw_port_label[in_path[len(in_path)-1]])
            push_arp = "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34888", temp_sw_port_label[in_path[len(in_path)-1]])
            i = len(in_path)-1
            while i >= 0:
                label = get_vll_label_from_dpid(in_path[len(in_path)-1])
                push_ip = push_ip + "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34887", label)
                push_arp = push_arp + "{\"type\":\"PUSH_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"SET_FIELD\", \"field\":\"mpls_label\", \"value\":%s}," %("34888", label)
                i = i-1
            print "*** Install Ingress Rules (RV) - RHS"
            # Ingress Rule For IP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\", \"eth_type\":\"%s\"}, \"actions\":[%s {\"type\":\"GOTO_TABLE\", \"table_id\":%d}]}' http://%s/stats/flowentry/add" % (int(dstSW, 16), id_, tableIP, int(dstPo, 16), "2048", push_ip, tableSBP, controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            # Ingress Rule For ARP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"in_port\":\"%s\", \"eth_type\":\"%s\"}, \"actions\":[%s {\"type\":\"GOTO_TABLE\", \"table_id\":%d}]}' http://%s/stats/flowentry/add" % (int(dstSW, 16), id_, tableIP, int(dstPo, 16), "2054", push_arp, tableSBP, controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            store_vll(id_, dstSW, tableIP)
            print "Install Egress Rules (RV) - LHS"
            # Rule For IP
            labelfw1 = temp_sw_port_label[out_path[len(out_path)-1]]
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"eth_type\":\"%s\", \"mpls_label\":\"%s\", \"mpls_bos\":\"1\"}, \"actions\":[{\"type\":\"POP_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(dstSW, 16), id_, tableSBP, "34887", labelfw1, "2048", int(dstPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            # Rule For ARP
            command = "curl -s -d '{\"dpid\": \"%s\", \"cookie\":\"%s\", \"priority\":\"32768\", \"table_id\":%d, \"match\":{\"eth_type\":\"%s\", \"mpls_label\":\"%s\", \"mpls_bos\":\"1\"}, \"actions\":[{\"type\":\"POP_MPLS\", \"ethertype\":\"%s\"}, {\"type\":\"OUTPUT\", \"port\":\"%s\"}]}' http://%s/stats/flowentry/add" % (int(dstSW, 16), id_, tableSBP, "34888", labelfw1, "2054", int(dstPo, 16), controllerRestIp)
            result = os.popen(command).read()
            print "*** Sent Command:", command + "\n"
            store_vll(id_, dstSW, tableSBP)
def del_command(data):
print "*** Delete Saved Vlls and PWs"
print "*** Read Previous Vlls Inserted"
if os.path.exists('sr_vlls.json'):
vllsDb = open('sr_vlls.json','r')
lines = vllsDb.readlines()
vllsDb.close()
vllsDb = open('sr_vlls.json','w')
# Removing previously created flow from switches
# using StaticFlowPusher rest API
# currently, circuitpusher records created circuits in local file ./circuits.db
# with circuit name and list of switches
controllerRestIp = args.controllerRestIp
for line in lines:
data = json.loads(line)
sw = data['Dpid']
cookie = data['name']
table = data['table_id']
print "*** Deleting Vll: %s - Switch %s" % (cookie, sw)
command = "curl -s -d '{\"cookie\":\"%s\", \"cookie_mask\":\"%s\", \"table_id\":%d, \"dpid\":\"%s\"}' http://%s/stats/flowentry/delete 2> /dev/null" % (cookie, (-1 & 0xFFFFFFFFFFFFFFFF), table, int(sw, 16), controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
vllsDb.close()
else:
lines={}
print "*** No Vlls Inserted"
#return
def run_command(data):
    """Dispatches to add_command or del_command based on the parsed action.

    Bug fix: previously consulted the module-level ``args`` global; it now
    uses its own ``data`` argument, which is what callers actually pass.
    """
    if data.action == 'add':
        add_command(data)
    elif data.action == 'delete':
        del_command(data)
def parse_cmd_line():
    """Parses the CLI options of the SR vll pusher; prints the help text and
    exits when the script is invoked without any argument."""
    parser = argparse.ArgumentParser(description='Segment Routing Virtual Leased Line Pusher')
    parser.add_argument('--controller', dest='controllerRestIp', action='store', default='localhost:8080', help='controller IP:RESTport, e.g., localhost:8080 or A.B.C.D:8080')
    parser.add_argument('--add', dest='action', action='store_const', const='add', default='add', help='action: add')
    parser.add_argument('--delete', dest='action', action='store_const', const='delete', default='add', help='action: delete')
    parser.add_argument('--cfg', dest='path', action='store', default='out_flow_catalogue.json', help='configuration file: path')
    parsed = parser.parse_args()
    # No arguments at all: show usage instead of silently defaulting
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parsed
if __name__ == '__main__':
    # Parse the command line, then add or delete the configured vlls
    args = parse_cmd_line()
    run_command(args)
<file_sep>/parsers-generators/nx2t3d.py
#######################################################################################################
# nx2t3d.py
#
# Copyright (C) 2015 <NAME> - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2015 <NAME> - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# te traffic engineering
#
# @author <NAME> <<EMAIL>>
# @author <NAME> <<EMAIL>>
#######################################################################################################
"""
Converts among Networkx multidigraph and T3D formats
"""
import os
import json
import sys
import networkx as nx
import copy
import re
import siphash
#VERTEX_INFO_KEY='vertex_info' #old version
VERTEX_INFO_KEY='info' #new version
#LINK_ID_KEY = 'link_label' #old version
LINK_ID_KEY = 'id' #new version
#LINK_TYPE_KEY = 'link-type' #old version
LINK_TYPE_KEY = 'view' #new version
#NODE_TYPE_KEY = 'node-type' #old version
NODE_TYPE_KEY = 'type' #new version
def nx_2_t3d_dict(nx_topology_new, defa_node_type="", defa_link_type="", add_link_id=False):
    """converts from a Networkx multidigraph into a T3D dictionary (returns the dictionary)
    if defa_node_type is !="" adds a default node type
    if defa_link_type is !="" adds a default link type
    if add_link_id is True, if there is no LINK_ID_KEY adds a unique link_id for each link (the id is unique among all links in the graph)
    the unique link is taken from the key that identifies the link among the multilinks, but it is checked that it is unique
    NOTE: node attribute dicts of the input graph are modified in place
    (the NODE_TYPE_KEY entry is removed after being copied).
    OPEN ISSUE: HOW DOES IT CONVERT THE UNIDIRECTIONAL LINKS IN THE MultiDiGraph INTO THE BIDIRECTIONAL LINKS IN T3D????
    """
    nodes_dict = {}
    # All node attributes in the Networkx graph are copied into
    # [VERTEX_INFO_KEY]['property']; the node type (if any) goes one level up.
    for n, d in nx_topology_new.nodes_iter(data=True):
        if NODE_TYPE_KEY not in d and defa_node_type != "":
            d[NODE_TYPE_KEY] = defa_node_type
        nodes_dict[str(n)] = {}
        nodes_dict[str(n)][VERTEX_INFO_KEY] = {}
        nodes_dict[str(n)][VERTEX_INFO_KEY]['property'] = {}
        if NODE_TYPE_KEY in d:
            nodes_dict[str(n)][VERTEX_INFO_KEY][NODE_TYPE_KEY] = d[NODE_TYPE_KEY]
            del d[NODE_TYPE_KEY]
        for key_dict in d:
            nodes_dict[str(n)][VERTEX_INFO_KEY]['property'][key_dict] = d[key_dict]
    edges_dict = {}
    # Edges between the same (source, dest) pair are grouped under one
    # "source&&dest" adjacency entry holding a list of links.
    for source, dest, key, d in nx_topology_new.edges_iter(data=True, keys=True):
        edge_str_id = str(source) + "&&" + str(dest)
        if edge_str_id not in edges_dict:
            edges_dict[edge_str_id] = {}
            edges_dict[edge_str_id]['links'] = []
        link_dict = {}
        for key_dict in d:
            link_dict[key_dict] = d[key_dict]
        if LINK_ID_KEY not in link_dict and add_link_id:
            # Bug fix: the id was previously stored under the literal string
            # 'LINK_ID_KEY' instead of the LINK_ID_KEY constant ('id'),
            # so generated link ids never appeared under the expected key.
            link_dict[LINK_ID_KEY] = get_id(key)
        if LINK_TYPE_KEY not in link_dict and defa_link_type != "":
            link_dict[LINK_TYPE_KEY] = defa_link_type
        edges_dict[edge_str_id]['links'].append(link_dict)
    topo_dict = dict([("edges", edges_dict), ("vertices", nodes_dict)])
    return topo_dict
def nx_2_t3d_json(nx_topology_new, json_out_file):
    """Serializes a Networkx multidigraph to a T3D JSON file."""
    topo_dict = nx_2_t3d_dict(nx_topology_new)
    json_data = json.dumps(topo_dict, sort_keys=True, indent=4)
    with open(json_out_file, "w") as out_file:
        out_file.write(str(json_data) + "\n")
def t3d_json_2_nx(nx_topology_new, json_file_in):
    """converts from a T3D JSON file to a Networkx multidigraph
    if there is no LINK_ID_KEY or it is the empty string, it adds a unique link_id for each link (the id is unique among all links in the graph)
    the unique link correspond to the key that identifies the link among the multilinks
    NOTE: nx_topology_new is an existing MultiDiGraph passed by reference and
    filled in place (it cannot be re-created inside this function).
    """
    if json_file_in == None:
        sys.exit('\n\tNo input file was specified as argument....!')
    with open(json_file_in) as data_file:
        t3d = json.load(data_file)
    #nx_topology_new= nx.MultiDiGraph() it is not possible to redefine the variable here!!!
    # Nodes: copy 'property' entries and the node type back into a flat
    # attribute dict, inverting what nx_2_t3d_dict does.
    for node_id in t3d['vertices']:
        node_dict={}
        if VERTEX_INFO_KEY in t3d['vertices'][node_id]:
            if 'property' in t3d['vertices'][node_id][VERTEX_INFO_KEY]:
                for key_dict in t3d['vertices'][node_id][VERTEX_INFO_KEY]['property']:
                    node_dict[key_dict]=t3d['vertices'][node_id][VERTEX_INFO_KEY]['property'][key_dict]
            if NODE_TYPE_KEY in t3d['vertices'][node_id][VERTEX_INFO_KEY]:
                node_dict[NODE_TYPE_KEY]=t3d['vertices'][node_id][VERTEX_INFO_KEY][NODE_TYPE_KEY]
        nx_topology_new.add_node(node_id, attr_dict=node_dict)
    # Edges: each adjacency key has the form "src&&dst" and holds a list of
    # parallel links; each link becomes one multidigraph edge keyed by its id.
    for adjacency_id in t3d['edges']:
        [id_src, id_dest] = adjacency_id.split("&&")
        for link_dict in t3d['edges'][adjacency_id]['links']:
            # Reuse a valid existing link id, otherwise generate a fresh one
            if LINK_ID_KEY in link_dict:
                if link_dict[LINK_ID_KEY] != "":
                    link_unique_id = link_dict[LINK_ID_KEY]
                else:
                    link_unique_id = get_id()
                    link_dict[LINK_ID_KEY] = link_unique_id
            else:
                link_unique_id = get_id()
                link_dict[LINK_ID_KEY] = link_unique_id
            nx_topology_new.add_edge(id_src, id_dest, key=link_unique_id)
            # Copy every link attribute onto the newly added edge
            for key_dict in link_dict:
                nx_topology_new[id_src][id_dest][link_unique_id][key_dict]= link_dict[key_dict]
def get_id(proposal=None):
    """Returns a unique string id, optionally honoring a proposed value.

    State is kept on the function object across calls:
      - counter: the highest numeric id handed out so far (-1 initially)
      - set_of_used_ids: non-numeric proposals already accepted
    Behavior:
      - numeric proposal: the counter is ratcheted up to it and the current
        counter value is returned
      - unseen non-numeric proposal: accepted and returned as-is
      - otherwise (no proposal, or a repeated non-numeric one): the counter
        is incremented and returned
    """
    if not hasattr(get_id, "counter"):
        get_id.counter = -1
    if not hasattr(get_id, "set_of_used_ids"):
        get_id.set_of_used_ids = set()
    if proposal is not None:
        try:
            numeric_value = int(proposal)
        except ValueError:
            numeric_value = None
        if numeric_value is not None:
            # Numeric proposal: never let the counter go backwards
            get_id.counter = max(get_id.counter, numeric_value)
            return str(get_id.counter)
        if proposal not in get_id.set_of_used_ids:
            get_id.set_of_used_ids.add(proposal)
            return proposal
    # Fall-through: mint a fresh numeric id
    get_id.counter += 1
    return str(get_id.counter)
<file_sep>/parsers-generators/README.md
# parsers-generators
It is a collection of tools for:
1. parsing topologies and converting them among different formats
2. generating traffic demands and evaluating some metrics
### nx2t3d.py
Conversion utilities between the Networkx multidigraph format and the Topology3D (T3D) format:
nx_2_t3d_dict : converts a Networkx multidigraph into a T3D dictionary
nx_2_t3d_json : converts a Networkx multidigraph into a T3D JSON file
t3d_json_2_nx : converts a T3D JSON file into a Networkx multidigraph
import os
import json
import sys
import random
import networkx as nx
import siphash
VLL_PUSHER_FILE_NAME = "../../Dreamer-Mininet-Extensions/vll_pusher.cfg"
FLOW_CATA_FILE_NAME = "flow_catalogue.json"
class FlowBuilderFactory:
# For from_file, controller_address is the ctrl endpoint; For random generation...
def getFlowBuilder(self, builder_type, controller_address):
if builder_type == "from_file":
return FromFileBuilder(controller_address)
elif builder_type == "random":
return None
else:
print "Builder %s Not Supported...Exit" %(type_testbed)
sys.exit(-1)
class FlowBuilder:
def __init__(self, controller_address):
self.controller_address = controller_address
print "initialized FlowBuilder with address : ", controller_address
self.flow_catalogue = {}
self.pusher_cfg = {}
# 100 Kb/s
self.ub_static_rate = 100
self.lb_static_rate = 50
key = '<KEY>'
self.sip = siphash.SipHash_2_4(key)
def cataloguePrint(self):
print json.dumps(self.flow_catalogue, sort_keys=True, indent=4)
def parseJsonToFC(self):
raise NotImplementedError("Abstract Method")
def serialize(self):
raise NotImplementedError("Abstract Method")
class FromFileBuilder(FlowBuilder):
def __init__(self, controller_address):
FlowBuilder.__init__(self, controller_address)
def parseJsonToFC(self):
path = VLL_PUSHER_FILE_NAME
if os.path.exists(path):
conf = open(path,'r')
self.pusher_cfg = json.load(conf)
conf.close()
else:
print "No Configuration File Find In %s" % path
sys.exit(-2)
self.retrieve_port_number_and_mac()
i = 0
for vll in self.pusher_cfg['vlls']:
size = random.uniform(self.lb_static_rate, self.ub_static_rate)
self.sip.update(str(i))
id_ = str(self.sip.hash())
self.flow_catalogue[i] = (vll['lhs_dpid'].replace(":",""), vll['rhs_dpid'].replace(":",""), {'out':{'size': size, 'allocated': False, 'srcPort': vll['lhs_intf'], 'dstPort':vll['rhs_intf'], 'type':'vll', 'path':[]}, 'in':{'size': size, 'allocated': False, 'srcPort': vll['rhs_intf'], 'dstPort':vll['lhs_intf'], 'type':'vll', 'path':[]}, 'id':id_})
i = i + 1
for pw in self.pusher_cfg['pws']:
size = random.uniform(self.lb_static_rate, self.ub_static_rate)
self.sip.update(str(i))
id_ = str(self.sip.hash())
self.flow_catalogue[i] = (pw['lhs_dpid'].replace(":",""), pw['rhs_dpid'].replace(":",""), {'out':{'size': size, 'allocated': False, 'srcPort': pw['lhs_intf'], 'dstPort':pw['rhs_intf'], 'srcMac': pw['lhs_mac'].replace(":",""), 'dstMac': pw['rhs_mac'].replace(":",""), 'type':'pw'}, 'in':{'size': size, 'allocated': False, 'srcPort': pw['rhs_intf'], 'dstPort':pw['lhs_intf'], 'srcMac': pw['rhs_mac'].replace(":",""), 'dstMac': pw['lhs_mac'].replace(":",""), 'type':'pw'}, 'id':id_})
i = i + 1
def retrieve_port_number_and_mac(self):
intf_to_port_number = {}
#command = "curl -s http://%s/v1.0/topology/switches | python -mjson.tool" % (self.controller_address)
command = "curl --max-time 30 -s http://%s/v1.0/topology/switches" % (self.controller_address)
result = os.popen(command).read()
if result != "":
pass
else:
print "Unable to get topology from controller at address: %s" % self.controller_address
sys.exit(-2)
parsedResult = json.loads(result)
default = None
for vll in self.pusher_cfg['vlls']:
lhs_intf = vll['lhs_intf']
lhs_dpid = vll['lhs_dpid'].replace(":","")
port_number = intf_to_port_number.get("%s-%s" % (lhs_dpid, lhs_intf), default)
if port_number == None :
for switch in parsedResult:
if switch["dpid"] == lhs_dpid:
for port in switch["ports"]:
if port["name"] == lhs_intf:
port_number = str(port["port_no"])
intf_to_port_number["%s-%s" % (lhs_dpid, lhs_intf)] = port_number
vll['lhs_intf'] = port_number
rhs_intf = vll['rhs_intf']
rhs_dpid = vll['rhs_dpid'].replace(":","")
port_number = intf_to_port_number.get("%s-%s" % (rhs_dpid, rhs_intf), default)
if port_number == None :
for switch in parsedResult:
if switch["dpid"] == rhs_dpid:
for port in switch["ports"]:
if port["name"] == rhs_intf:
port_number = str(port["port_no"])
intf_to_port_number["%s-%s" % (rhs_dpid, rhs_intf)] = port_number
vll['rhs_intf'] = port_number
for pw in self.pusher_cfg['pws']:
lhs_intf = pw['lhs_intf']
lhs_dpid = pw['lhs_dpid'].replace(":","")
port_number = intf_to_port_number.get("%s-%s" % (lhs_dpid, lhs_intf), default)
if port_number == None :
for switch in parsedResult:
if switch["dpid"] == lhs_dpid:
for port in switch["ports"]:
if port["name"] == lhs_intf:
port_number = str(port["port_no"])
intf_to_port_number["%s-%s" % (lhs_dpid, lhs_intf)] = port_number
pw['lhs_intf'] = port_number
rhs_intf = pw['rhs_intf']
rhs_dpid = pw['rhs_dpid'].replace(":","")
port_number = intf_to_port_number.get("%s-%s" % (rhs_dpid, rhs_intf), default)
if port_number == None :
for switch in parsedResult:
if switch["dpid"] == rhs_dpid:
for port in switch["ports"]:
if port["name"] == rhs_intf:
port_number = str(port["port_no"])
intf_to_port_number["%s-%s" % (rhs_dpid, rhs_intf)] = port_number
pw['rhs_intf'] = port_number
# Transform the catolgue of the flows in a nx multidigraph
def multidigraph_from_flow_catalogue(self):
    """Build a networkx MultiDiGraph from self.flow_catalogue.

    Each catalogue entry is (src, dst, flow_dict); the 'out' direction adds
    a src -> dst edge and the 'in' direction adds a dst -> src edge, both
    keyed by the flow id and carrying the requested size plus an empty
    'path' list that the routing step fills in later.
    """
    nx_flows = nx.MultiDiGraph()
    # .items() instead of the Python-2-only .iteritems(): behaves the same
    # under Python 2 and keeps the code runnable under Python 3.
    for flow_id, (src, dst, flow_dict) in self.flow_catalogue.items():
        if 'out' in flow_dict and 'size' in flow_dict['out']:
            nx_flows.add_edge(src, dst, flow_id,
                              {'size': flow_dict['out']['size'], 'path': []})
        if 'in' in flow_dict and 'size' in flow_dict['in']:
            nx_flows.add_edge(dst, src, flow_id,
                              {'size': flow_dict['in']['size'], 'path': []})
    return nx_flows
def serialize(self):
    """Dump self.flow_catalogue to FLOW_CATA_FILE_NAME as pretty-printed JSON."""
    # The with-statement closes the file on exit; the original's explicit
    # outfile.close() afterwards was redundant and has been removed.
    with open(FLOW_CATA_FILE_NAME, 'w') as outfile:
        json.dump(self.flow_catalogue, outfile, indent=4, sort_keys=True)
<file_sep>/parsers-generators/graphml2nx_ste.py
#########################
# graphml2nx.py
#########################
import os
import json
import sys
import networkx as nx
import copy
import argparse
import random
import siphash
import time
import xml.etree.ElementTree as ET
import re
import numpy as np
from distribuzioni import *
#NODE_TYPE_KEY = 'node-type' #old version
NODE_TYPE_KEY = 'type' #new version
#LINK_TYPE_KEY = 'link-type' #old version
LINK_TYPE_KEY = 'view' #new version
# L average length in bit
L = 1000
# Scale factor (Capacity and size are expressed in kb/s)
S = 1000
DEFAULT_LINK_CAPA = 60
PERCENTAGE_NODES_CORE = 0.2
pusher_cfg = {}
# operates on the topology object nx_topology_new passed by reference
# if allow_multilink is False, an edge with the same source and destination of an existing edge is not added
# by default, directed_graph_in=False this means that the input graph is assumed to be undirected and two directed links are added
# for each input link
def parse_graphml(nx_topology_new, input_file_name, defa_node_type="", defa_link_type="", allow_multilink=True, directed_graph_in=False):
    """Populate nx_topology_new (modified in place) from a GraphML file.

    Nodes are added with their city/country data attributes and
    NODE_TYPE_KEY = defa_node_type.  Links get their LinkSpeed as capacity
    (DEFAULT_LINK_CAPA when the file has none) and
    LINK_TYPE_KEY = defa_link_type.  When directed_graph_in is False the
    input is treated as undirected and a reverse link is added for each
    edge; when allow_multilink is False, parallel edges are skipped.
    """
    if input_file_name == 'None':
        sys.exit('\n\tNo input file was specified as argument....!')
    xml_tree = ET.parse(input_file_name)
    namespace = "{http://graphml.graphdrawing.org/xmlns}"
    ns = namespace
    root_element = xml_tree.getroot()
    graph_element = root_element.find(ns + 'graph')
    index_values_set = root_element.findall(ns + 'key')
    node_set = graph_element.findall(ns + 'node')
    edge_set = graph_element.findall(ns + 'edge')
    id_node_city_dict = {}
    id_node_country_dict = {}
    id_node_link_speed_dict = {}
    id_node_id_dict = {}
    id_node_link_dict = {"src_id","dst_id"}
    node_name_value=""
    node_country_value=""
    node_id_value=""
    node_link_speed_value=""
    node_speed_link_in_graphml=""
    node_label_country_in_graphml=""
    node_label_name_in_graphml=""
    node_label_id_in_graphml=""
    # Find the GraphML <key> ids of the attributes we care about
    for i in index_values_set:
        if i.attrib['attr.name'] == "id" and i.attrib["for"] == "node":
            node_label_id_in_graphml = i.attrib["id"] #d37
        if i.attrib['attr.name'] == 'label' and i.attrib['for'] == 'node':
            node_label_name_in_graphml = i.attrib['id'] #d33
        if i.attrib['attr.name'] == 'Country' and i.attrib['for'] == 'node':
            node_label_country_in_graphml = i.attrib['id'] #d30
        if i.attrib['attr.name'] == 'LinkSpeed' and i.attrib['for'] == 'edge':
            node_speed_link_in_graphml = i.attrib['id'] #d39
    # Visit every <node> element of the xml file
    for n in node_set:
        node_index_value = n.attrib['id']
        data_set = n.findall(ns + 'data')
        # Scan the node's <data> attributes
        for d in data_set:
            if d.attrib['key'] == node_label_name_in_graphml:
                # name of the city where the node is located
                node_name_value = re.sub(r'\s+', '', d.text)
            if d.attrib['key'] == node_label_country_in_graphml:
                # name of the country where the node is located
                node_country_value = re.sub(r'\s+', '', d.text)
            if d.attrib["key"] == node_label_id_in_graphml:
                # id of the node
                node_id_value = re.sub(r'\s+', '', d.text)
                #print node_id_value
        #save id:data couple
        #id_node_city_dict[node_index_value] = node_name_value
        #id_node_country_dict[node_index_value] = node_country_value
        #id_node_id_dict[node_index_value] = int(node_id_value)
        id_node_id_dict[node_index_value] = str(node_id_value)
        #nx_topology_new.add_node(str(node_id_value), city = node_name_value, country = node_country_value, type_node = 'core' )
        nx_topology_new.add_node(str(node_id_value), city = node_name_value, country = node_country_value)
        nx_topology_new.node[str(node_id_value)][NODE_TYPE_KEY]=defa_node_type
    #for i in range(0, len(id_node_city_dict)):
    # #Aggiungo i link nella lista
    # nx_topology_new.add_node(int(id_node_id_dict[str(i)]))
    # nx_topology_new.node[int(id_node_id_dict[str(i)])][NODE_TYPE_KEY]=defa_node_type
    # #print "===========", nx_topology_new.node[int(id_node_id_dict[str(i)])][NODE_TYPE_KEY]
    # #print 'node City = '+id_node_city_dict[str(i)]+" Country = "+ id_node_country_dict[str(i)]+" id = "+str(id_node_id_dict[str(i)])
    #print "\n"
    i=0
    # Visit every <edge> element of the xml file
    for e in edge_set:
        data_set = e.findall(ns + 'data')
        cont = False
        # Scan the edge's <data> attributes
        for d in data_set:
            if d.attrib['key'] == node_speed_link_in_graphml:
                # link capacity -- TODO: the capacity units still need to be normalized
                node_link_speed_value = re.sub(r'\s+', '', d.text)
                cont = True
        if cont is False:
            node_link_speed_value=DEFAULT_LINK_CAPA # default capacity when the graphml provides none
        id_node_link_speed_dict[i] = node_link_speed_value
        src_id = e.attrib['source']
        dst_id = e.attrib['target']
        #print "Link tra "+str(id_node_id_dict[src_id])+" e "+str(id_node_id_dict[dst_id])+" con capacita': "+str(id_node_link_speed_dict[i])
        # Add the link
        src_index= str(id_node_id_dict[src_id])
        dst_index= str(id_node_id_dict[dst_id])
        if (not (nx_topology_new.has_edge(src_index,dst_index)) or allow_multilink):
            unique_key = get_id()
            add_single_link(nx_topology_new, src_index, dst_index, unique_key, round(float(id_node_link_speed_dict[i])), defa_link_type)
        if (not directed_graph_in):
            if (not(nx_topology_new.has_edge(dst_index,src_index)) or allow_multilink):
                unique_key = get_id()
                # add the reverse link, so links between core nodes are bidirectional
                add_single_link(nx_topology_new, dst_index, src_index, unique_key, round(float(id_node_link_speed_dict[i])), defa_link_type)
        i=i+1
#return is not needed as the function operates on the topology object passed by reference
def add_single_link(nx_topology_new, src_index, dst_index, unique_key, capacity_value, defa_link_type):
    """Insert one directed edge keyed by unique_key with capacity bookkeeping.

    The edge starts with zero allocated bandwidth and an empty flow list;
    its link-type attribute (LINK_TYPE_KEY) is set to defa_link_type.
    """
    nx_topology_new.add_edge(src_index, dst_index, key=unique_key,
                             capacity=capacity_value, allocated=0,
                             flows=[], id=unique_key)
    link_attributes = nx_topology_new.edge[src_index][dst_index][unique_key]
    link_attributes[LINK_TYPE_KEY] = defa_link_type
#it adds edge nodes to the nx_topology_new object
def add_edge_nodes(nx_topology_new):
    """Add randomly-connected border ("bordo") nodes to the core topology.

    The number of border nodes is PERCENTAGE_NODES_CORE times the current
    node count (the original comment said 10%, but the constant is 0.2 --
    i.e. 20% -- TODO confirm which was intended).  Each border node is
    wired to one randomly chosen core node with a link in each direction.
    """
    global n_nodi_core
    n_nodi_core = nx_topology_new.number_of_nodes()
    global n_nodi_di_bordo
    n_nodi_di_bordo=int((n_nodi_core*PERCENTAGE_NODES_CORE)) # border nodes as a fraction of the core nodes read from the xml file
    random.seed(10) # seeded RNG so the generated topology is repeatable; used to pick the border-node attachments
    for i in range(0,n_nodi_di_bordo):
        dst= random.randrange(0,n_nodi_core-1,1)
        # link from the border node to the randomly chosen core node
        nx_topology_new.add_edge(n_nodi_core+i, dst, capacity = int(random.uniform(50,200)), allocated=0, type='bordo-core' ,flows=[])
        # reverse link, so border<->core connections are bidirectional
        nx_topology_new.add_edge(dst, n_nodi_core+i, capacity = int(random.uniform(50,200)), allocated=0, type='core-bordo' ,flows=[])
#return is not needed as the function operates on the topology object passed by reference
def get_id():
    """Return a process-wide unique id as a string: "0", "1", "2", ...

    The counter is stored on the function object itself, so successive
    calls count up monotonically starting from "0".
    """
    #TODO it could be possible to replace with sip
    counter = getattr(get_id, "counter", -1) + 1
    get_id.counter = counter
    return str(counter)
<file_sep>/parsers-generators/test_load_network.py
from test_load_network_utility import *
import random
import itertools
import time
epsilon = 1 #capacita' dei flussi, e' il passo con cui saturiamo la rete
def Test_Load_Network(nx_topology, flow_catalogue_new, list_value, Risultato_test):
seed=5
count = 0
list_node = [] #list_node [source, sink, flusso allocato] contiene tutte le coppie di nodi di bordo con il relativo flusso massimo che satura la rete
control = True
flussi_aggregati = [] #flussi_aggregati [source, sink, capacita' flussi aggregati, molteplicita'] contiene tutte le coppie di nodi di bordo con le relative capacita' dei flussi aggregati
work_flow_catalogue_new = flow_catalogue_new.copy()
for flow_id, (src, dst, flow_dict) in flow_catalogue_new.iteritems():
size = 0
multiplicity = 0
for flow_id1, (src1, dst1, flow_dict1) in work_flow_catalogue_new.iteritems():
if src == src1 and dst == dst1:
multiplicity = multiplicity + 1
size = size + flow_dict1['out']['size']
flussi_aggregati.append([src,dst,size,multiplicity])
flussi_aggregati.sort() #ordina e elimina i doppioni da flussi_aggregati
flussi_aggregati = list(flussi_aggregati for flussi_aggregati,_ in itertools.groupby(flussi_aggregati))
nx_topology_copy=nx_topology.copy()
for edge in nx_topology.nodes_iter(data=True):
if edge[1]['type_node'] == 'bordo' :
for edge1 in nx_topology_copy.nodes_iter(data=True):
if edge1[1]['type_node'] == 'bordo' and edge[0] != edge1[0] :
list_node.append([edge[0],edge1[0], 0, False]) #list_node contiene [source, sink, capacita' flusso allocato, flusso ancora allocabile]
tempo_iniziale=time.time()
passo=epsilon
print 'while'
while control:
control = False
flow_catalogue_test = Catalog_Generator(flussi_aggregati, passo, list_node, seed) #crea il flow_caqtalogue a partire da list_node
nx_flows = multidigraph_from_flow_catalogue(flow_catalogue_test)
seed = seed +1
# BIGK is the max available capacity
BIGK = 0
for edge in nx_topology.edges_iter(data = True):
if edge[2]['capacity'] > BIGK:
BIGK = edge[2]['capacity']
control = cspf(nx_topology, flow_catalogue_test, nx_flows, control, BIGK, list_node, Risultato_test) #algoritmo CSPF
nx_topology_work=nx_topology.copy()
for edge in nx_topology.edges_iter(data=True):
for edge1 in nx_topology_work.edges_iter(data=True):
if edge[0] == edge1[0] and edge[1] == edge1[1]:
edge[2]['capacity']=edge1[2]['capacity']-edge1[2]['allocated']
edge[2]['allocated'] = 0
tempo_finale=time.time()
if tempo_finale-tempo_iniziale >= 60:
passo = passo * 2
tempo_iniziale=time.time()
print passo
count = count +1
Ftot = 0 # Domanda aggregata (somma dei flussi aggregati di ogni coppia di nodi)
for fa in flussi_aggregati:
Ftot = Ftot + fa[2]
Prob_Rif = 0 # probabilita' di riufiuto
for fa in flussi_aggregati:
for fmax in list_node:
if fa[0] == fmax[0] and fa[1] == fmax[1]:
Prob_Rif = Prob_Rif + (fa[2]/fmax[2]*fa[2]/Ftot)
media_pesata_flussi = 0
somma_cap_massima = 0
Risultato_test.write("Lista dei flussi aggregati (Source, Sink, Capacita' in Mbps, Molteplicita'):\n"+str(flussi_aggregati)+"\n\n")
Risultato_test.write("Lista dei flussi che saturano la rete (Source, Sink, Capacita' in Mbps):\n"+str(list_node)+"\n\n")
Risultato_test.write("Percentuale di quanto i flussi occupano rispetto a flussi massimi che saturano la rete:\n")
for i in flussi_aggregati:
for j in list_node:
for k in list_value:
if i[0] == j[0] == k[0] and i[1] == j[1] == k[1]:
try:
Risultato_test.write("Source: "+str(i[0])+" Sink: "+str(i[1])+" Percentuale: "+str(i[2]/float(j[2])*100)+"% "+"Max Flow: "+str(k[4])+"\n")
media_pesata_flussi = media_pesata_flussi + (i[2]*j[2])
somma_cap_massima = somma_cap_massima + j[2]
except:
Risultato_test.write("Source: "+str(i[0])+" Sink: "+str(i[1])+" Flusso non allocato Max Flow: "+str(k[4])+"\n")
media_pesata_flussi = media_pesata_flussi + (i[2]*j[2])
somma_cap_massima = somma_cap_massima + j[2]
Risultato_test.write("\nPercentuale di rifiuto: "+str(Prob_Rif)+"\n")
|
1c78f644375ab532fad399c406ca02157f25a4cb
|
[
"Markdown",
"Python"
] | 15 |
Python
|
jiahao-shen/SDN-TE-SR-tools
|
a02af01fed2f116861dc03650e5de578a6ef5581
|
015d16539ed35be578172320acb65202be231713
|
refs/heads/master
|
<repo_name>normnorm/openshift-elasticsearch<file_sep>/bin/install-version
#!/bin/bash -e

# Re-install the Elasticsearch distribution at the requested version.
# Usage: install-version <ELASTICSEARCH-VERSION>
if [ $# -lt 1 ]; then
    echo "Usage: $0 <ELASTICSEARCH-VERSION>"
    echo "Example: $0 2.3.3"
    exit 1
fi
# _install_version is defined in the cartridge's lib/util.
source $OPENSHIFT_ELASTICSEARCH_DIR/lib/util
_install_version $1
<file_sep>/bin/install
#!/bin/bash -e
source $OPENSHIFT_CARTRIDGE_SDK_BASH
source $OPENSHIFT_ELASTICSEARCH_DIR/lib/util
mkdir -p $OPENSHIFT_ELASTICSEARCH_DIR/run $OPENSHIFT_DATA_DIR/{elasticsearch,elasticsearch-plugins}
touch $OPENSHIFT_ELASTICSEARCH_DIR/env/OPENSHIFT_ELASTICSEARCH_CLUSTER
_install_version ${ES_VERSION}<file_sep>/README.md
OpenShift Elasticsearch Cartridge
=================================
This cartridge provides an Elasticsearch cluster as a standalone OpenShift application.
To create your Elasticsearch app, run:
rhc app-create https://raw.githubusercontent.com/normnorm/openshift-elasticsearch/master/metadata/manifest.yml -a <app>
If you want to create a Elasticsearch cluster, append the flag `--scaling`:
rhc app-create https://raw.githubusercontent.com/normnorm/openshift-elasticsearch/master/metadata/manifest.yml -a <app> --scaling
### Adding extra nodes to cluster
To add more nodes to the cluster, simply add more gears:
rhc cartridge-scale -a <app> elasticsearch <number of total gears you want>
### Plugins
To install Elasticsearch plugins, edit the `plugins.txt` file, commit, and push your changes.
You can also install plugins from a .zip file. Simply place it inside dir `plugins/`, git add, commit and push.
### Configuration
#### Elasticsearch
Elasticsearch configuration is built on-the-fly with the file `config/elasticsearch.yml.erb`, concatenated with any other files found in that same dir (except for `logging.yml` and `elasticsearch.in.sh`). Files ending with `.erb` will be pre-processed using ruby's erb command.
#### Nginx
Nginx is configured by editing the file `nginx.conf.erb` from the git repo's root.
#### Kibana
Kibana can be accessed by `port-forward`ing with your rhc client and using a local install of Kibana. This is a better alternative than having the Kibana overhead on small gears and exposing it to the world through an open port.
#### Security
Authentication and authorization are handled by NGINX - in the interest of keeping things simple and free - Shield is not required for basic security.
You can create a basicauth login and add it to the file:
`template/htpasswd`
An example login is present; you should replace it with your own.
### License
This cartridge is [MIT](http://opensource.org/licenses/MIT) licensed.
<file_sep>/lib/util
#!/bin/bash
export ES_CONFIG_DIR="$OPENSHIFT_REPO_DIR/config/"
export ES_CONFIG_FILE="$ES_CONFIG_DIR/elasticsearch.yml"
export ES_CONFIG_LOG_FILE="$ES_CONFIG_DIR/logging.yml"
export ES_PID_FILE="$OPENSHIFT_ELASTICSEARCH_DIR/run/elasticsearch.pid"
export NGINX_PID_FILE="$OPENSHIFT_ELASTICSEARCH_DIR/run/nginx.pid"
export NGINX_VERSION=${NGINX_VERSION:-1.7.8}
export NGINX_EXEC="$OPENSHIFT_ELASTICSEARCH_DIR/nginx/versions/$NGINX_VERSION/bin/nginx"
export NGINX_CONFIG_FILE="$OPENSHIFT_ELASTICSEARCH_DIR/conf/nginx.conf"
export PLUGIN_CMD="$OPENSHIFT_ELASTICSEARCH_DIR/usr/bin/plugin -Des.path.plugins=$OPENSHIFT_DATA_DIR/elasticsearch-plugins"
function _install_version()
{
    # Download and unpack the given Elasticsearch release into
    # $OPENSHIFT_ELASTICSEARCH_DIR/usr, removing any previously
    # installed version first.
    local VERSION=$1
    if [ -d $OPENSHIFT_ELASTICSEARCH_DIR/usr ]; then
        echo Removing old version $(<$OPENSHIFT_ELASTICSEARCH_DIR/usr/.version)
        rm -rf $OPENSHIFT_ELASTICSEARCH_DIR/usr
    fi
    echo Downloading version $VERSION
    mkdir $OPENSHIFT_ELASTICSEARCH_DIR/usr
    # --strip-components=1 drops the elasticsearch-$VERSION/ top-level dir
    # from the tarball so the files land directly under usr/.
    curl https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/$VERSION/elasticsearch-$VERSION.tar.gz | tar xzf - --strip-components=1 -C $OPENSHIFT_ELASTICSEARCH_DIR/usr
}
function _is_scalable()
{
    # Scalable when this gear's DNS differs from the app DNS (we are on a
    # secondary gear) or an HAProxy cartridge directory is present.
    if [ "${OPENSHIFT_APP_DNS}" = "${OPENSHIFT_GEAR_DNS}" ] && [ -z "${OPENSHIFT_HAPROXY_DIR}" ]; then
        return 1
    fi
    return 0
}
function _install_plugins()
{
    # Remove every currently installed plugin, then (re)install the set
    # listed in $OPENSHIFT_REPO_DIR/plugins.txt: one entry per line, either
    # "name" or "name=url"; lines starting with # are ignored.
    local old_plugins=$($PLUGIN_CMD list | awk '/-/{print $2}' | xargs)
    # NOTE(review): the "!= No" check presumably filters the "No plugin
    # detected" message that "plugin list" prints when empty -- confirm.
    if [ -n "$old_plugins" -a "$old_plugins" != "No" ]; then #ARGH!
        echo "Removing old ElasticSearch plugins..."
        for plugin in $old_plugins; do
            $PLUGIN_CMD remove $plugin
        done
    fi
    echo "Installing ElasticSearch plugins..."
    local plugins="$(grep -v '^#' $OPENSHIFT_REPO_DIR/plugins.txt 2>/dev/null | xargs)"
    if [ "${plugins}" ]; then
        for plugin in ${plugins}; do
            # Split an optional "name=url" pair; with no "=", cut returns
            # the whole entry for both fields.
            local name=$(echo $plugin | cut -f 1 -d =)
            local url=$(echo $plugin | cut -f 2 -d =)
            if [ "$name" == "$url" ]; then
                # No URL given: install by plugin name.
                $PLUGIN_CMD install $name
            else
                # Install from the explicit URL.
                $PLUGIN_CMD install $url
            fi
        done
    fi
}
# Only build elasticsearch config here
function _build_config()
{
    # Render the Elasticsearch configuration: expand every *.erb template
    # in $ES_CONFIG_DIR, then append all remaining config files (except
    # the main config, the logging config and *.sh files) to
    # $ES_CONFIG_FILE.  SCALABLE=1/0 is passed to the erb templates.
    _is_scalable && local S=1 || local S=0
    # Process all ERB templates first
    for FILE in $ES_CONFIG_DIR/*.erb; do
        [ -f "$FILE" ] || continue
        SCALABLE=$S oo-erb "$FILE" > "${FILE%.erb}"
    done
    # Append all config files to main config
    for FILE in $ES_CONFIG_DIR/*; do
        [ -f "$FILE" ] || continue
        # Skip *.erb sources (already rendered above) and shell scripts.
        [ "${FILE/*.erb/}" ] || continue
        [ "${FILE/*.sh/}" ] || continue
        # Skip the main config file itself and the logging config.
        [ "${FILE##*/}" == "${ES_CONFIG_FILE##*/}" ] && continue
        [ "${FILE##*/}" == "${ES_CONFIG_LOG_FILE##*/}" ] && continue
        echo
        echo "#### Include from $FILE"
        echo
        SCALABLE=$S oo-erb "$FILE"
    done >> "${ES_CONFIG_FILE}"
}
function _es_heap_size()
{
    # Compute the Elasticsearch JVM heap size as a percentage of the gear
    # memory.  The percentage is read from the marker file
    # .openshift/markers/es_heap_size_percent (default: 90) and clamped to
    # [50, 100].  Output looks like "921m" (no trailing newline).
    local DEFAULT_PERCENT=90
    if [ -r "$OPENSHIFT_REPO_DIR/.openshift/markers/es_heap_size_percent" ]; then
        local PERCENT=$(<$OPENSHIFT_REPO_DIR/.openshift/markers/es_heap_size_percent)
        # Fall back to the default if the marker content is not an integer.
        [[ "$PERCENT" -eq "$PERCENT" ]] 2>/dev/null || PERCENT=$DEFAULT_PERCENT
    else
        local PERCENT=$DEFAULT_PERCENT
    fi
    # Ensure sane limits
    [ "$PERCENT" -gt 100 ] && PERCENT=100
    [ "$PERCENT" -lt 50 ] && PERCENT=50
    # BUG FIX: the original echoed $DEFAULT_PERCENT here, silently ignoring
    # the marker file.  Also multiply before dividing so integer truncation
    # does not discard up to 1% of the gear memory.
    echo -n $(($OPENSHIFT_GEAR_MEMORY_MB*$PERCENT/100))m
}
<file_sep>/bin/setup
#!/bin/bash -e

# Cartridge setup: record the Elasticsearch version to install (falling
# back to 2.3.3 when $ES_VERSION is unset) and the ES_HOME path as
# cartridge environment variables.
if [ ${ES_VERSION} ]; then
    echo $ES_VERSION
else
    echo 2.3.3
fi > $OPENSHIFT_ELASTICSEARCH_DIR/env/ES_VERSION
# NOTE(review): ">>" appends on every re-run, so ES_HOME accumulates
# duplicate lines if setup executes more than once -- ">" was likely intended.
echo $OPENSHIFT_REPO_DIR >> $OPENSHIFT_ELASTICSEARCH_DIR/env/ES_HOME
|
43c1bd5377be90e1430040209b2015f2efba578b
|
[
"Markdown",
"Shell"
] | 5 |
Shell
|
normnorm/openshift-elasticsearch
|
1b012c850b423c1e7b480e5cc3b6ca42ce3c9fa8
|
b286e4288bbca873d0e44e85a6a7c04af067a5a6
|
refs/heads/main
|
<file_sep>rootProject.name = "Internship Application"
include ':app'
<file_sep>package ua.kpi.comsys.internshipapplication;
import android.content.Context;
import android.content.Intent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.squareup.picasso.Picasso;
import java.util.List;
import fr.tkeunebr.gravatar.Gravatar;
import ua.kpi.comsys.internshipapplication.models.Contact;
public class RecyclerViewAdapter extends RecyclerView.Adapter<RecyclerViewAdapter.MyViewHolder> {
private final List<Contact> contacts;
private final Context context;
private final int mAvatarImageViewPixelSize;
public RecyclerViewAdapter(Context context, List<Contact> contacts) {
this.context = context;
this.contacts = contacts;
mAvatarImageViewPixelSize = context.getResources().getDimensionPixelSize(R.dimen.avatar_image_view_size);
}
@NonNull
@Override
public MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.contact_in_grid, parent, false);
return new MyViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull MyViewHolder holder, int position) {
String gravatarUrl = Gravatar.init().with(contacts.get(position).getEmail()).force404().size(mAvatarImageViewPixelSize).build();
Picasso.with(context)
.load(gravatarUrl)
.placeholder(R.drawable.ic_contact_picture)
.error(R.drawable.ic_contact_picture)
.into(holder.imageView);
if (contacts.get(position).getStatus().equals("Online")) {
holder.textView.setVisibility(View.VISIBLE);
}
}
@Override
public int getItemCount() {
return contacts.size();
}
public class MyViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
ImageView imageView;
TextView textView;
public MyViewHolder(@NonNull View itemView) {
super(itemView);
imageView = itemView.findViewById(R.id.contact_image_in_grid);
textView = itemView.findViewById(R.id.badge_notification_2);
itemView.setOnClickListener(this);
}
@Override
public void onClick(View v) {
Contact contact = (Contact) contacts.get(getLayoutPosition());
Intent intent = new Intent(context, ContactInfo.class);
intent.putExtra("main info", contact.getMainInfo());
intent.putExtra("email", contact.getEmail());
context.startActivity(intent);
}
}
}
|
cfc7de1ba5722c0e7f6c7fb3cf303cf2af114dcc
|
[
"Java",
"Gradle"
] | 2 |
Gradle
|
Katya-art/InternshipApplication
|
c5d2804d5900fa86a0c4c569f169d6773e27b0c6
|
653e5f9be4023975df37e781c177b70220747911
|
refs/heads/master
|
<file_sep><!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="iso-8859-2" />
<meta name="author" content="MD"/>
<meta name="keywords" content="supit,všpr,računarstvo,vježbe"/>
<meta name="description" content="Vježbe ishoda učenja 1.Tema:HTML" />
<title>Supit - zadatak 5</title>
</head>
<body>
<img src="banner.jpg" alt="banner" />
<h1>SUPIT</h1>
<h2>Vježbe 02, zadatak 5</h2>
<hr />
<h4 id="auto1">Auto 1</h4>
<p align="justify">
<img src="auto1.jpg" alt="auto1" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<h4 id="auto2">Auto 2</h4>
<p align="justify">
<img src="auto2.jpg" alt="auto2" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<h4 id="auto3">Auto 3</h4>
<p align="justify">
<img src="auto3.jpg" alt="auto3" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<h4 id="auto4">Auto 4</h4>
<p align="justify">
<img src="auto4.jpg" alt="auto4" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<h4 id="auto5">Auto 5</h4>
<p align="justify">
<img src="auto5.jpg" alt="auto5" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<h4 id="auto6">Auto 6</h4>
<p align="justify">
<img src="auto6.jpg" alt="auto6" width="100px" height="100px" border="10px" hspace="10px" align="left"/>
Lorem ipsum dolor sit amet, consectetur adipisicing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum
</p>
<br />
<a href="http://www.vw.com">Više...</a>
<br />
<br />
<hr />
<p align="center">
<a href="#auto1">Auto 1</a> |
<a href="#auto2">Auto 2</a> |
<a href="#auto3">Auto 3</a> |
<a href="#auto4">Auto 4</a> |
<a href="#auto5">Auto 5</a> |
<a href="#auto6">Auto 6</a>
</p>
<hr />
<p align="center">
© Racunarstvo
</p>
</body>
</html>
<file_sep>window.onload = function () {
document.getElementById("btnLoadID").onclick = function () {
LoadNewPicture();
}
}
function LoadNewPicture() {
var slika = document.getElementsByTagName("img")[0];
var putanja = document.getElementById("pathID").value;
if (putanja != "")
slika.setAttribute("src", "../Pictures/" + putanja);
}<file_sep>function Start() {
do {
IzborGeometrijskogTijela();
} while (confirm("Ponovi?"));
document.write("Gotovo");
}
function IzborGeometrijskogTijela() {
    // Ask which solid to compute and dispatch: 1 = cube, 2 = sphere,
    // 3 = cylinder; anything else reports invalid input.
    var odabir = Number(prompt("1-kocka, 2-kugla, 3-valjak", "1"));
    var handlers = { 1: Kocka, 2: Kugla, 3: Valjak };
    var handler = handlers[odabir];
    if (handler)
        handler();
    else
        document.write("Krivi unos");
}
function Kocka() {
    // Keep prompting until the edge length is a valid number.
    var edge;
    do {
        edge = Number(prompt("Duljina stranice:", ""));
    } while (isNaN(edge));
    // Surface area: 6*a^2, volume: a^3.
    var surface = 6 * edge * edge;
    var volume = edge * edge * edge;
    Ispis("Kocka: ", volume.toFixed(2), surface.toFixed(2));
}
function Kugla() {
    // Keep prompting until the radius is a valid number.
    var radius;
    do {
        radius = Number(prompt("Duljina radiusa:", ""));
    } while (isNaN(radius));
    // Surface area: 4*pi*r^2, volume: (4/3)*pi*r^3.
    var surface = 4 * radius * radius * Math.PI;
    var volume = (4 / 3.0) * radius * radius * radius * Math.PI;
    Ispis("Kugla: ", volume.toFixed(2), surface.toFixed(2));
}
function Valjak() {
    // Cylinder: keep prompting until both radius and height are valid numbers.
    var v, r, oplosje, volumen;
    do {
        r = Number(prompt("Duljina radiusa:", ""));
        v = Number(prompt("Visina: ", ""));
    } while (isNaN(r) || isNaN(v));
    // Surface area: 2*pi*r*(r+h), volume: pi*r^2*h.
    oplosje = 2 * r * Math.PI * (r + v);
    volumen = r * r * Math.PI * v;
    // BUG FIX: the label said "Kocka: " (cube); this function computes the cylinder.
    Ispis("Valjak: ", volumen.toFixed(2), oplosje.toFixed(2));
}
function Ispis(ime, volumen, oplosje) {
var ispis = ime + ": V=" + volumen + ", O=" + oplosje + "\n";
alert(ispis);
}<file_sep><<<<<<< HEAD
window.onload = function () {
    // Put the cursor into the article text box on load.
    document.getElementById("txtArtikl").focus();
    // Grab the three buttons.
    var gumbPocetak = document.getElementById("btnPocetak");
    var gumbKraj = document.getElementById("btnKraj");
    var gumbObrisi = document.getElementById("btnObrisi");
    // Wire up their click handlers.
    gumbPocetak.onclick = function () {
        DodajPocetak();
    }
    gumbKraj.onclick = function () {
        DodajKraj();
    }
    gumbObrisi.onclick = function () {
        Obrisi();
    }
}
function DodajPocetak() {
    // Insert the typed article name at the top of the list (ignore empty input).
    if (document.getElementById("txtArtikl").value != "") {
        // Create the new list item
        var li = document.createElement("li");
        var liTekst = document.createTextNode(ImeUnosa());
        li.appendChild(liTekst);
        // Insert before the current first <li>; append when the list is empty
        var ulista = document.getElementById("listaArtikala");
        var pozicija = ulista.getElementsByTagName("li")[0];
        if (pozicija == undefined)
            ulista.appendChild(li);
        else
            ulista.insertBefore(li, pozicija);
    }
}
function DodajKraj() {
    // Append the typed article name to the end of the list (ignore empty input).
    if (document.getElementById("txtArtikl").value != "") {
        // Create the new list item
        var li = document.createElement("li");
        var liTekst = document.createTextNode(ImeUnosa());
        li.appendChild(liTekst);
        // Append it to the end of the list
        var ulista = document.getElementById("listaArtikala");
        ulista.appendChild(li);
    }
}
function Obrisi() {
    // Remove the <li> at the index typed into txtIndex.
    var index = parseInt(document.getElementById("txtIndex").value);
    var ul = document.getElementById("listaArtikala");
    var ukloniti = ul.getElementsByTagName("li")[index];
    // BUG FIX: guard against an out-of-range or non-numeric index --
    // removeChild(undefined) would throw a TypeError.
    if (ukloniti !== undefined) {
        ul.removeChild(ukloniti);
    }
}
function ImeUnosa() {
var unos = document.getElementById("txtArtikl");
return unos.value;
=======
function AppendHeaders() {
console.log(document.getElementsByTagName("h1").length);
>>>>>>> origin/master
}<file_sep><!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Web Q2</title>
<style type="text/css">
body {
background-color:#87B3E4;
line-height:1.5em;
font-family:Calibri;
color:white;
}
h1 {
font-size:3em;
text-decoration:underline;
text-transform:uppercase;
}
h2 {
color:yellow;
font-style:italic;
text-transform:capitalize;
}
p {
font-size:1.1em;
color:black;
}
div {
margin-top:80px;
text-align:center;
}
span {
color:white;
font-style:italic;
font-weight:bold;
}
</style>
</head>
<body>
<h1>CSS Practice1</h1>
<h2>Basic information</h2>
<p>
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
</p>
<h2>Other information</h2>
<p>
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo <span>consequat.</span>
</p>
<div>©MD</div>
</body>
</html>
<file_sep>function NameInput() {
var nameArray = new Array();
do {
nameArray.push(prompt("Name:", ""));
} while (confirm("More names?"));
var choice = confirm("Print ascending order? No = Descending.");
if (choice == true) {
document.write(nameArray.sort().join(", "));
}
else {
document.write(nameArray.sort().reverse().join(", "));
}
}
function StudentInput() {
var students = new Array();
do {
var student = new Array();
var studentName = prompt("Name:", "");
var studentLastName = prompt("Last name:", "");
var studentEmail = prompt("E-mail:", "");
student.push(studentName);
student.push(studentLastName);
student.push(studentEmail);
students.push(student);
} while (confirm("more?"));
var outputString = "";
for (var i = 0; i < students.length; i++) {
outputString += students[i].join(", ");
outputString += "<br />";
}
document.write(outputString);
}<file_sep>function NameInput() {
var names = new Array();
do {
names.push(prompt("Name and last name:", ""));
} while (confirm("more?"));
var output = "";
for (var i = 0; i < names.length; i++) {
output += names[i].substring(0, names[i].indexOf(" ", 0));
output += ", ";
}
document.write(output);
}<file_sep><!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title></title>
</head>
<body style="font-family: Calibri; background-color: #87B3E4; color: white;">
<h1 style="font-size: 3em; text-transform:uppercase;text-decoration:underline;">CSS Practice6</h1>
<h2 style="text-transform:capitalize; color:yellow;font-style:italic;">Basic information</h2>
<p style="font-size: 1.1em; color:black; line-height:1.5em">
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
</p>
<h2 style="text-transform:capitalize;color:yellow;font-style:italic;">Other information</h2>
<p style="font-size: 1.1em; color:black;">
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
<span style="color:white; font-style:italic;">consequat.</span>
</p>
<p style="text-align:center;margin-top:80px;">
©MD
</p>
</body>
</html>
<file_sep>var ocjena;
var tekst;
// Re-prompt until the input coerces to a number.
// NOTE(review): an empty string (or whitespace) coerces to 0, so it passes
// isNaN and falls through to the default branch below -- confirm intent.
do {
ocjena = prompt("Ocjena:", "");
} while (isNaN(ocjena));
// Map a Croatian school grade (1-5) to its verbal description.
// The "<NAME>" literals are anonymisation placeholders left by the data
// export; they are runtime strings and are left untouched.
switch (Number(ocjena)) {
case 1:
tekst = "Nedovoljan";
break;
case 2:
tekst = "Dovoljan";
break;
case 3:
tekst = "Dobar";
break;
case 4:
tekst = "<NAME>";
break;
case 5:
tekst = "Odličan";
break;
default:
// Any numeric value outside 1-5 lands here.
tekst = "<NAME>";
}
document.write(tekst);
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title></title>
</head>
<body>
<fieldset>
<form>
<input type="text" name="txtValue" value="sda" /> type="text"
<br />
<br />
<input type="password" name="pass" /> type="password"
<br />
<br />
<input type="radio" name="imeGrupe" value="CD" />
<input type="radio" name="imeGrupe" value="DVD" checked="checked" />
<input type="radio" name="imeGrupe" value="BR" />
type="radio" name="imeGrupe"
<br />
<br />
<input type="checkbox" name="imeDrugeGrupe" value="CD" />
<input type="checkbox" name="imeDrugeGrupe" value="DVD" checked="checked" />
<input type="checkbox" name="imeDrugeGrupe" value="BR" checked />
type="checkbox" name="imeDrugeGrupe"
<br />
<br />
<input type="submit" name="gumbi" value="submit"/> type="submit" name="gumbi"
<br />
<br />
<input type="reset" name="gumbi" value="reset" /> type="reset"
<br />
<br />
<input type="button" name="gumbi" value="button" /> type="button"
<br />
<br />
<textarea rows="10" cols="50">
textarea rows="10" cols="50"
</textarea>
<br />
<br />
<select multiple size="4">
<option selected>Golf</option>
<option>Audi</option>
<option>BMW</option>
</select> select multiple size="4"
<br />
<br />
<select>
<option>Tesla</option>
<optgroup label="VW">
<option>Polo</option>
<option>Golf</option>
<option>Passat</option>
</optgroup>
<optgroup label="Audi">
<option>A1</option>
<option>A3</option>
<option>A4</option>
</optgroup>
</select> select, optgroup label="Audi"
<br />
<br />
<label for="ID_A">A Opcija</label>
<input type="radio" name="opcija" value="A" id="ID_A"/>
<label for="ID_B">B Opcija</label>
<input type="radio" name="opcija" value="B" id="ID_B"/> => Prvo label tag sa for vrijednosti na ID input taga koji slijedi
<br /><br />
Drugi nacin:
<br /><br />
<label>
Opcija A
<input type="radio" name="opcija" value="A" checked/>
</label>
<label>
Opcija B
<input type="radio" name="opcija" value="B" />
</label> => Label tag u koji stavimo plain text Opcija A i zatim input tag
<br />
<hr />
<legend>Osobni podaci <- Legend tag</legend>
<label>
Ime:
<input type="text" name="ime" value="" />
<br />
</label>
<label>
Prezime:
<input type="text" name="prezime" value="" />
<br />
</label>
<input type="submit" name="posalji" value="posalji" /> Submit gumb
</form> Kraj forme
</fieldset> Kraj fieldseta
</body>
</html>
|
cb53631c537af3b66ae6e4353c927dd58791e90f
|
[
"JavaScript",
"HTML"
] | 10 |
HTML
|
matijaderk/Web_Technologies
|
9b9aedf04d1b94d197e2c601968d66f71be698d1
|
d106665e6c0f3ef0e0230f0114ded1173b5747de
|
refs/heads/master
|
<file_sep>package OCA;
// Demo: accessing static fields through an instance reference (`this.si`).
// The compiler resolves `this.si` to the static TestClass.si at compile
// time, so this works but is misleading style; prefer TestClass.si.
public class TestClass {
static int si = 10;
static int si2 = 20;
public static void main(String args[]) {
// Constructing the object prints it, which invokes toString() below.
new TestClass();
}
public TestClass() {
System.out.println(this);
}
public String toString() {
// String concatenation is left-to-right, so this prints
// "TestClass.si = 1020" (10 then 20 juxtaposed), not their sum (30).
return "TestClass.si = " + this.si + this.si2;
}
}<file_sep>package JavaInterPrograms;
/**
 * Computes 10! two ways -- an iterative loop in main and the recursive
 * helper {@link #fact(int)} -- and prints both results (they must agree).
 */
public class FactorialOfANumber {
    public static void main(String... ags) {
        int result = 1;
        int num = 10;
        // Iterative factorial: multiply num, num-1, ..., 1.
        for (int i = num; i >= 1; i--) {
            result = result * i;
        }
        System.out.println(result);
        System.out.println("********************");
        System.out.println(fact(num));
    }

    /**
     * Recursive factorial.
     *
     * @param num non-negative integer; values above 12 overflow int
     * @return num! (with 0! == 1)
     * @throws IllegalArgumentException if num is negative (previously a
     *         negative argument recursed without bound until
     *         StackOverflowError)
     */
    public static int fact(int num) {
        if (num < 0) {
            throw new IllegalArgumentException("factorial undefined for negative input: " + num);
        }
        if (num == 0) return 1;
        return (num * fact(num - 1));
    }
}
<file_sep>public class Employee {
String name;
String address;
String phoneNumber;
float experience;
void punchIn(){
System.out.println("Punch In");
}
public static void main(String... args) {
Demo demo2 = Demo.checkAndReturnObject();
Demo demo3 = Demo.checkAndReturnObject();
}
}
<file_sep>package Bindings;
public class Programmer extends Employee {
static String name = "Programmer";
static void printname(){
System.out.println(name);
}
}<file_sep>package tests;
import lib._Base;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.jayway.restassured.specification.RequestSpecification;
import com.jayway.restassured.builder.RequestSpecBuilder;
import static com.jayway.restassured.RestAssured.given;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.not;
public class ToDosTest{
//Base class to be used for creating of new ToDos
// Request payload for creating a new ToDo (no id field; the server assigns
// one). NOTE(review): callers serialize this with toString(), which is NOT
// overridden -- the default Object.toString() ("NewToDoDetails@hash") is
// what actually gets sent as the request body. Confirm whether a
// JSON-producing toString() or a serializer was intended.
private class NewToDoDetails{
int userId;
String title;
boolean completed;
NewToDoDetails(int userId, String title, boolean completed){
this.userId = userId;
this.title = title;
this.completed = completed;
}
}
//Base class to be used for updation of existing todo
// Request payload for updating an existing ToDo.
// BUG FIX: the constructor accepted `id` but never stored it, so the id
// field stayed 0 in every update request; it is now assigned.
// NOTE(review): callers serialize this with toString(), which is not
// overridden -- confirm whether a JSON-producing toString() was intended.
private class ExistingToDoDetails{
    int id;
    int userId;
    String title;
    boolean completed;

    ExistingToDoDetails(int id, int userId, String title, boolean completed){
        this.id = id;
        this.userId = userId;
        this.title = title;
        this.completed = completed;
    }
}
_Base base;
RequestSpecification requestSpec;
@BeforeClass
public void beforeClass(){
base = new _Base();
//Load properties files
base.loadConfig();
base.loadTestData();
requestSpec = new RequestSpecBuilder().
setBaseUri(base.getConfig("baseURI")).
setBasePath("todos").
build();
}
@DataProvider(name="getToDosIDs")
public String[][] getToDosIDs(){
//Parameterize test with set of Ids for data driven testing
String strToDosIDs = base.getTestData("todo.idList");
String[] tmpArr = strToDosIDs.split(",");
String[][] arrToDosIDs = new String[tmpArr.length][1];
for(int i=0; i<tmpArr.length; i++){
arrToDosIDs[i][0] = tmpArr[i];
}
return arrToDosIDs;
}
@Test(dataProvider="getToDosIDs")
public void viewToDosForDifferentIDs(String id) {
given().
spec(requestSpec).
when().
get(id).
then().
statusCode(Integer.valueOf(base.getConfig("successStatusCode"))).
body("id",equalTo(Integer.valueOf(id)));
}
@Test
public void viewToDosForInvalidId() {
given().
spec(requestSpec).
when().
get(base.getTestData("todo.invalidId")).
then().
statusCode(Integer.valueOf(base.getConfig("notFoundStatusCode"))).
body("id",not(hasItem("id")));
}
@Test
public void viewToDosDetailsForAParticularID() {
String id = base.getTestData("todo.id");
given().
spec(requestSpec).
when().
get(id).
then().
statusCode(Integer.valueOf(base.getConfig("successStatusCode"))).
body("userId",equalTo(Integer.valueOf(base.getTestData("todo.userId")))).
body("id",equalTo(Integer.valueOf(id))).
body("title",equalTo(base.getTestData("todo.title"))).
body("completed",equalTo(Boolean.valueOf(base.getTestData("todo.completed"))));
}
@Test
public void createToDos(){
int userId = Integer.valueOf(base.getTestData("todo.userId"));
String title = base.getTestData("todo.title");
boolean completed = Boolean.valueOf(base.getTestData("todo.completed"));
NewToDoDetails todoDetails = new NewToDoDetails(userId, title, completed);
given().
spec(requestSpec).
body(todoDetails.toString()).
when().
post().
then().
statusCode(Integer.valueOf(base.getConfig("createdStatusCode"))).
body("id", equalTo(Integer.valueOf(base.getTestData("todo.newId"))));
}
@Test
public void createToDosInvalidRequest(){
//Do not pass json as request
given().
spec(requestSpec).
//body(todoDetails.toString()).
when().
post().
then().
statusCode(Integer.valueOf(base.getConfig("unsupportedMediaTypeStatusCode")));
}
@Test
public void updateToDos(){
int id = Integer.valueOf(base.getTestData("todo.idToUpdate"));
int userId = Integer.valueOf(base.getTestData("todo.userId"));
String title = "Updated title.";
boolean completed = false;
ExistingToDoDetails todoDetails = new ExistingToDoDetails(id, userId, title, completed);
given().
spec(requestSpec).
body(todoDetails.toString()).
when().
put(String.valueOf(id)).
then().
statusCode(Integer.valueOf(base.getConfig("successStatusCode"))).
body("id", equalTo(id));
}
@Test
public void updateToDosInvalidRequest(){
int id = Integer.valueOf(base.getTestData("todo.invalidId"));
int userId = Integer.valueOf(base.getTestData("todo.userId"));
String title = "Updated title.";
boolean completed = false;
ExistingToDoDetails todoDetails = new ExistingToDoDetails(id, userId, title, completed);
given().
spec(requestSpec).
body(todoDetails.toString()).
when().
put(String.valueOf(id)).
then().
statusCode(Integer.valueOf(base.getConfig("notFoundStatusCode")));
}
@Test
public void deleteToDos() {
String id = base.getTestData("todo.id");
given().
spec(requestSpec).
when().
delete(id).
then().
statusCode(Integer.valueOf(base.getConfig("successStatusCode")));
}
@Test
public void deleteToDosInvalidRequest() {
String id = base.getTestData("todo.invalidId");
given().
spec(requestSpec).
when().
delete(id).
then().
statusCode(Integer.valueOf(base.getConfig("notFoundStatusCode")));
}
}
<file_sep>public abstract class AbsClassConstructor {
AbsClassConstructor(int d) {
System.out.println(d);
}
}
<file_sep>package HackerRank;
import java.util.*;
import java.io.*;
/**
 * HackerRank "Java Loops II": for each query (a, b, n) print n terms of the
 * series whose j-th term (0-based) is a + (2^0 + 2^1 + ... + 2^j) * b.
 */
class Solution1{
    /**
     * Returns the n-th term of the series: a + sum_{i=0}^{n} 2^i * b.
     * BUG FIX: the original loop body was empty, so every term came back
     * as just `a`.
     *
     * @param a additive offset
     * @param b multiplier applied to each power of two
     * @param n 0-based term index
     * @return the n-th series value
     */
    static int getValue(int a, int b, int n) {
        int sum = a;
        for (int i = 0; i <= n; i++) {
            sum += (1 << i) * b; // 2^i * b without Math.pow/double rounding
        }
        return sum;
    }

    public static void main(String []argh){
        Scanner in = new Scanner(System.in);
        // First line: query count t; each query supplies a, b and the number
        // of terms n to print on one line.
        int t = in.nextInt();
        for(int i = 0; i < t; i++) {
            int a = in.nextInt();
            int b = in.nextInt();
            int n = in.nextInt();
            for(int j = 0; j < n; j++) {
                System.out.print(getValue(a, b, j) + " ");
            }
            System.out.println();
        }
    }
}
<file_sep>package JavaInterPrograms;
public class FindMissingNumber {
public static void main(String... args) {
int[] i = {1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14};
int sum1 = 0;
int sum2 = 0;
for (int j = 0; j < i.length; j++) {
sum1 = sum1 + i[j];
}
System.out.println(sum1);
for (int j = 0; j <= i.length + 1; j++) {
sum2 = sum2 + j;
}
System.out.println(sum2);
System.out.println("Missing number is ::" + (sum2 - sum1));
}
}<file_sep>package PracticePrograms;
// Demonstrates try/catch/finally ordering and exception propagation.
class TempProgram {
// Always throws; declared with `throws` so callers must propagate it.
static void myMethod() throws Exception {
throw new Exception("error message");
}
public static void main(String... args) throws Exception {
try {
// System.exit(0);
throw new Exception("error message");
} catch (Exception e) {
// catch runs first, then finally -- both lines below print.
System.out.println("Adil's try catch " + e.getMessage());
} finally {
System.out.println("Adil Finally");
}
// Outside any try: this exception propagates out of main and the JVM
// prints a stack trace after the two lines above.
myMethod();
}
}<file_sep>package Shajahan;
// Demonstrates static vs. instance state: `count` is shared by every
// Counter object, while `nonStaticcount` is per-instance.
public class Counter {
private static int count = 0;
private int nonStaticcount = 0;
Counter() {
// Each construction bumps the shared counter and this object's own one.
count++;
nonStaticcount++;
}
// Class-wide construction count (same value observed from all objects).
public int getCount() {
return count;
}
// This object's own count (always 1 after construction in this demo).
public int getNonStaticcount() {
return nonStaticcount;
}
public static void main(String args[]) {
Counter countObj1 = new Counter();
Counter countObj2 = new Counter();
// Expected output: static count is 2 for both objects; non-static is 1.
System.out.println("Static count for Obj1: " + countObj1.getCount());
System.out.println("NonStatic count for Obj1: " + countObj1.getNonStaticcount());
System.out.println("Static count for Obj2: " + countObj2.getCount());
System.out.println("NonStatic count for Obj2: " + countObj2.getNonStaticcount());
}
}<file_sep>package Mala;
public class Manager implements Interviewer {
public String submitInterviewStatus() {
System.out.println("Manager : Accepted");
return null;
}
}<file_sep>package Naveen.Collections;
import PracticePrograms.StaticBlockDemo;
import java.util.ArrayList;
import java.util.Iterator;
public class ArrayListNaveen {
public static <E> void main(String... args) {
int[] a = new int[3];
ArrayList ar = new ArrayList();
ar.add(10);
ar.add(20);
ar.add(30);
ar.add(40);
ar.add(54.33);
ar.add('a');
ar.add("Adil");
ar.add(true);
System.out.println(ar.size());
System.out.println(ar.get(4));
ArrayList<Integer> ar1 = new ArrayList<Integer>();
ar1.add(1000);
ArrayList<Employee> ar4 = new ArrayList<Employee>();
Employee e1 = new Employee("Adil", 38, "QA");
Employee e2 = new Employee("Ansar", 39, "QA2");
Employee e3 = new Employee("Shajeer", 40, "QA3");
ar4.add(e1);
ar4.add(e2);
ar4.add(e3);
Iterator<Employee> iterator = ar4.iterator();
while (iterator.hasNext()) {
Employee emp = iterator.next();
System.out.println("With iterator:: " + emp.name);
System.out.println("With iterator:: " + emp.age);
System.out.println("With iterator:: " + emp.role);
System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^");
System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^");
}
//Method 2
for (Employee e : ar4) {
System.out.println("With for each: " + e.name);
System.out.println("With for each: " + e.age);
System.out.println("With for each: " + e.role);
System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^");
}
StaticBlockDemo staticBlockDemo = new StaticBlockDemo();
StaticBlockDemo staticBlockDemo1 = new StaticBlockDemo();
StaticBlockDemo staticBlockDemo2 = new StaticBlockDemo();
StaticBlockDemo staticBlockDemo3 = new StaticBlockDemo();
ArrayList<String> ar7 = new ArrayList<String>();
ArrayList<String> ar8 = new ArrayList<String>();
ar7.add("test");
ar7.add("selenium");
ar7.add("QTP");
ar8.add("test");
ar8.add("Java");
ar7.retainAll(ar8);
for (String s : ar7) {
System.out.println(s);
}
}
}<file_sep>package Naveen;
import io.github.bonigarcia.wdm.WebDriverManager;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import java.util.concurrent.TimeUnit;
public class SendCharSequence {
public static void main(String... args) {
WebDriverManager.chromedriver().setup();
WebDriver driver = new ChromeDriver();
driver.get("https://app.hubspot.com/login");
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
WebElement email = driver.findElement(By.id("username"));
//2. Below uses String Builder.
StringBuilder stringBuilder = new StringBuilder().append("Adil")
.append(" ").append("Prac");
//3. Below uses String Buffer.
StringBuffer stringBuffer = new StringBuffer().append("StringBuffer")
.append(" ");
email.sendKeys(stringBuilder, stringBuffer, Keys.TAB);
}
}
<file_sep>package Naveen.Collections;
import java.util.Iterator;
import java.util.LinkedList;
public class LinkedListNaveen {
public static void main(String... args) {
LinkedList<String> ll = new LinkedList<>();
ll.add("test");
ll.add("qtp");
ll.add("selenium");
ll.add("RPA");
ll.add("RFT");
System.out.println(ll);
//addFirst
ll.addFirst("Adil");
//addLast
ll.addLast("Automation");
System.out.println(ll);
//get
System.out.println(ll.get(1));
//set
ll.set(1, "Tom");
System.out.println(ll.get(1));
//removeFirst
ll.removeFirst();
//removeLast
ll.removeLast();
System.out.println(ll);
//remove from index
ll.remove(1);
System.out.println(ll);
//To Print all the elements of the LinkedList
//1. For each loop
System.out.println("^^^^^^^^^^^^^^^^Use Advanced For loop^^^^^^^^^^^^^^^^");
for (String s : ll ) {
System.out.println(s);
}
//2. Iterator
System.out.println("^^^^^^^^^^^^^^^^Using Iterator^^^^^^^^^^^^^^^^");
Iterator iterator = ll.iterator();
while (iterator.hasNext()) {
System.out.println(iterator.next());
}
System.out.println("^^^^^^^^^^^^^^^^Use For loop^^^^^^^^^^^^^^^^");
//3. Simple for loop
for (int i = 0; i <ll.size() ; i++) {
System.out.println(ll.get(i));
}
System.out.println("^^^^^^^^^^^^^^^^Use While loop^^^^^^^^^^^^^^^^");
//4. use While loop
int num = 0;
while (ll.size() > num) {
System.out.println(ll.get(num));
num++;
}
}
}<file_sep>package JavaInterPrograms;
// Reverses the decimal digits of an int by repeatedly peeling off the last
// digit (i % 10) and appending it to an accumulator.
public class ReverseInteger {
public static void main(String... args) {
int i = 123456789;
int rev = 0;
while (i != 0) {
// Shift accumulated digits left, append the current last digit.
// NOTE(review): rev * 10 + digit overflows int when the reversal
// exceeds Integer.MAX_VALUE (e.g. input 1000000003) -- no check here.
rev = rev * 10 + i % 10;
i = i / 10;
}
System.out.println(rev);
}
}<file_sep>package Ploymorphism;
import java.util.ArrayList;
import java.util.List;
public class AnimalFeeder {
public static void main(String... args) {
Animal animal = new Cat();
animal.eat(); // Will fetch only Cat methods and nothing from Animal class.
((Cat) animal).meow(); //To Access the Cat methods.
// Even though animal is assigned to Cat object, the variable is fetched of Animal class
System.out.println(animal.animalVariable);
System.out.println("*********************************************************************************");
List<Animal> animals = new ArrayList<Animal>();
animals.add(new Cat());
animals.add(new Dog());
System.out.println("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&");
new AnimalFeeder().feed(animals);
System.out.println("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&");
//Below is an Interface
Mew mew = new Cat();
mew.meow();
//To make mew obj access Cat class methods, we need to upcast the subclass obj mew to
// Cat parent class and then access the
((Cat)mew).eat();
}
public void feed(List<Animal> animals) {
animals.forEach(animal -> {
if (animal instanceof Cat){
//Casting example
((Cat)animal).meow();
}
animal.eat();
});
}
}<file_sep>package HeadFirst;
public class WrapperClasses {
Integer i;
int j;
public static void main(String... args) {
WrapperClasses wrapperClasses = new WrapperClasses();
wrapperClasses.go();
}
public void go() {
System.out.println(i);
System.out.println(j);
j=1;
i=3;
j = i;
System.out.println(i);
System.out.println(j);
}
}
|
47938772c3693304b2f10efb3606ccadf7ca8c00
|
[
"Java"
] | 17 |
Java
|
adilmsrit/LeanFTPractice
|
dd3589804feb8e0440b1f7a462712c7ec447404f
|
54610768629681036cb0d3c7dad882b656e6829a
|
refs/heads/master
|
<file_sep># Build a class EmailParser that accepts a string of unformatted
# emails. The parse method on the class should separate them into
# unique email addresses. The delimiters to support are commas (',')
# or whitespace (' ').
# Splits a raw string of e-mail addresses (delimited by commas and/or
# whitespace) into unique addresses.
#
# BUG FIX: the original deleted elements from the token array while
# iterating it with #each; Array#delete during #each skips the element that
# slides into the deleted slot, so a short token adjacent to another short
# token could survive the filter. The filter is now a non-mutating #reject.
#
# NOTE(review): @@email_str/@@emails are class variables, so every new
# EmailParser instance overwrites the text of all previous ones -- confirm
# whether per-instance state was intended before changing that design.
class EmailParser
  attr_accessor :text

  @@all = []
  @@email_str = String.new
  @@emails = []

  # email_str: unformatted addresses separated by ',' and/or whitespace.
  def initialize(email_str)
    @@email_str = email_str
    @@all.push(self)
  end

  # Last parsed address list (duplicates retained, as before).
  def self.emails
    @@emails
  end

  # Returns the unique addresses found in the most recently supplied string.
  # Tokens shorter than 3 characters (e.g. the empty strings produced by
  # splitting on ", ") are discarded, matching the original length filter.
  def parse
    tokens = @@email_str.split(/,|\s/)
    @@emails = tokens.reject { |token| token.length < 3 }
    @@emails.uniq
  end
end
e=EmailParser.new("<EMAIL>, <EMAIL> <EMAIL>, <EMAIL>")
puts e.parse().inspect
|
5f065f9f7ddfec59793d3660758e62efb506f3db
|
[
"Ruby"
] | 1 |
Ruby
|
alrawi90/oo-email-parser-re-coded-000
|
b096638d02a5e05cf38a7a4702b1a3e6b4c6ec82
|
ae69ee7b13ec53a5b3688199987249e954aa3a7a
|
refs/heads/master
|
<repo_name>magj2006/nomicon_example<file_sep>/README.md
## All codes are from the nomicon book for Rust.
<file_sep>/ch5/src/main.rs
fn main() {
/*
checked section
*/
let x: i32;
if true {
x = 5;
} else {
x = 7;
}
println!("{}", x);
let x_r = x;
println!("{}", x);
println!("{}", x_r);
let y = Box::new(1);
let y_r = y.clone(); // Box is not Copy
println!("{}", y);
println!("{}", y_r);
/*
Drop Flag section
*/
let mut x = Box::new(0); // let makes a fresh variable, so never need to drop
let y = &mut x;
*y = Box::new(1); // Deref assumes the referent is initialized, so always drops
// drop(x); //Error
// println!("x: {}, y: {}", x, y); Error
/*
Unchecked section
*/
use std::mem::{self, MaybeUninit};
// Size of the array is hard-coded but easy to change (meaning, changing just
// the constant is sufficient). This means we can't use [a, b, c] syntax to
// initialize the array, though, as we would have to keep that in sync
// with `SIZE`!
const SIZE: usize = 10;
let x = {
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
// safe because the type we are claiming to have initialized here is a
// bunch of `MaybeUninit`s, which do not require initialization.
let mut x: [MaybeUninit<Box<u32>>; SIZE] = unsafe { MaybeUninit::uninit().assume_init() };
// Dropping a `MaybeUninit` does nothing. Thus using raw pointer
// assignment instead of `ptr::write` does not cause the old
// uninitialized value to be dropped.
// Exception safety is not a concern because Box can't panic
for i in 0..SIZE {
x[i] = MaybeUninit::new(Box::new(i as u32));
}
// Everything is initialized. Transmute the array to the
// initialized type.
unsafe { mem::transmute::<_, [Box<u32>; SIZE]>(x) }
};
dbg!(x);
}
|
0889f09ff40df645b9b6808b18311749adf5e817
|
[
"Markdown",
"Rust"
] | 2 |
Markdown
|
magj2006/nomicon_example
|
bef5fd3df439796e4d3c35de6aa2354a1ed857dc
|
3e65d7bd6566eed90ca4a3edf4a86278db04462a
|
refs/heads/master
|
<repo_name>paul-charlton/Bug_XamarinShellPopNavigation<file_sep>/ShellPopIssue/ShellPopIssue/Views/NotAShell.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xamarin.Forms;
using Xamarin.Forms.Xaml;
namespace ShellPopIssue.Views
{
[XamlCompilation(XamlCompilationOptions.Compile)]
public partial class NotAShell : ContentPage
{
public NotAShell()
{
InitializeComponent();
GotoShellButton.Command = new Command(() => Application.Current.MainPage = new AppShell());
}
}
}<file_sep>/ShellPopIssue/ShellPopIssue/Views/PushPopPage.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xamarin.Forms;
using Xamarin.Forms.Xaml;
namespace ShellPopIssue.Views
{
// Page with two buttons: push another instance of itself onto the Shell
// navigation stack, or pop back.
[XamlCompilation(XamlCompilationOptions.Compile)]
public partial class PushPopPage : ContentPage
{
public PushPopPage()
{
InitializeComponent();
// "pushpop" must be a registered Shell route for GoToAsync to resolve it.
// NOTE(review): ConfigureAwait(false) inside these command handlers lets
// the continuation resume off the UI thread; Shell navigation is normally
// expected to run on the main thread -- confirm this is intentional.
PushTheButton.Command = new Command(async () => await Shell.Current.GoToAsync("pushpop").ConfigureAwait(false));
PopTheButton.Command = new Command(async () => await Shell.Current.Navigation.PopAsync().ConfigureAwait(false));
}
}
}<file_sep>/ShellPopIssue/ShellPopIssue/AppShell.xaml.cs
using ShellPopIssue.ViewModels;
using ShellPopIssue.Views;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Xamarin.Forms;
namespace ShellPopIssue
{
public partial class AppShell : Xamarin.Forms.Shell
{
private bool _started;
public AppShell()
{
InitializeComponent();
Routing.RegisterRoute(nameof(ItemDetailPage), typeof(ItemDetailPage));
Routing.RegisterRoute(nameof(NewItemPage), typeof(NewItemPage));
_started = true;
}
protected override void OnAppearing()
{
// hopefully to stop a null ref exception in shell code
this.SetValue(NavBarIsVisibleProperty, false);
base.OnAppearing();
}
protected override void OnChildRemoved(Element child, int oldLogicalIndex)
{
base.OnChildRemoved(child, oldLogicalIndex);
}
protected override void OnNavigated(ShellNavigatedEventArgs args)
{
base.OnNavigated(args);
Debug.WriteLine($"NavigatED From: {args.Previous?.Location?.ToString() ?? "-"}; To: {args.Current?.Location?.ToString() ?? "-"}; By: {args.Source.ToString()};");
if (!_started)
return;
var shellSection = Shell.Current?.CurrentItem;
var shellContent = shellSection?.CurrentItem;
var items = shellContent?.Items;
var stack = shellContent?.Stack;
Debug.WriteLine($"Stack Count: {stack?.Count ?? -1}");
/*
* Now using the Pre-nav option
if (args.Current?.Location.ToString().Equals("//home/myhealth/default", System.StringComparison.InvariantCultureIgnoreCase) == true && !PrototypeService.IsUserAuthenticated)
{
// just push to new page
await GoToAsync("myhealth/guest", false).ConfigureAwait(false);
}*/
}
protected override async void OnNavigating(ShellNavigatingEventArgs args)
{
base.OnNavigating(args);
Debug.WriteLine($"NavigatING From: {args.Current?.Location?.ToString() ?? "-"}; To: {args.Target?.Location?.ToString() ?? "-"}; By: {args.Source.ToString()};");
/*
#if DEBUG
var test = Shell.GetNavBarIsVisible(new MyCareHubPage());
var test2 = Shell.GetNavBarIsVisible(new MyHealthGuestPage());
var shellSection = Shell.Current?.CurrentItem;
var shellContent = shellSection?.CurrentItem;
var items = shellContent?.Items;
var stack = shellContent?.Stack;
if (!_shownKeys)
{
var keys = typeof(Routing).GetMethod("GetRouteKeys", BindingFlags.Static | BindingFlags.NonPublic)?.Invoke(null, null) as string[] ?? new string[0];
if (keys?.Length > 0)
{
Debug.WriteLine($"Routing Keys:\r\n{string.Join("\r\n", keys)}");
_shownKeys = true;
}
}
#endif
*/
if (!_started)
return;
// this is weird but it seems to leave a load of stuff on the stack that stops navigation working??
if (args.Current?.Location?.ToString()?.Equals(args.Target?.Location?.ToString() ?? "-") == true && args.Source == ShellNavigationSource.ShellSectionChanged)
await Current.Navigation.PopToRootAsync().ConfigureAwait(false);
}
}
}
|
f831773652337d26e3d2f5e8b587c8b90c7c29a6
|
[
"C#"
] | 3 |
C#
|
paul-charlton/Bug_XamarinShellPopNavigation
|
97319f9c9a1e98de8436578a7f508f2a71622d9a
|
77ab3a8d11d4284675c3a1bbece3d479a9acf4ff
|
refs/heads/master
|
<file_sep>const PubSub = require('../helpers/pub_sub.js');
const IntensityView = function () {
};
IntensityView.prototype.bindEvents = function(){
PubSub.subscribe('Regions:carbon-intensity-ready', (evt) => {
const carbonIntensity = evt.detail;
this.renderTopContainer(carbonIntensity);
this.renderMapImage()
});
};
IntensityView.prototype.renderTopContainer = function(carbonIntensity) {
const intensityContainer = document.querySelector('#heading');
const intensityElement = this.createElement('div', `${carbonIntensity.actual}`);
intensityElement.classList.add('intensity');
this.styleAccordingToIndex(carbonIntensity, intensityElement);
// intensityElement.insertAdjacentHTML('afterbegin', 'Current Carbon Intensity');
const breakElement = document.createElement('br');
const textIntensity = document.createElement('div');
textIntensity.insertAdjacentHTML('beforeend', '<small>gCO<sub>2</sub>/kWh</small>');
intensityElement.appendChild(breakElement);
intensityElement.appendChild(textIntensity);
intensityContainer.appendChild(intensityElement);
};
IntensityView.prototype.renderMapImage = function () {
const container = document.querySelector('#heading');
const imgContainer = document.createElement('div');
imgContainer.classList.add('map-box');
const imageElement = document.createElement('img');
imageElement.src = './img/iu.png';
imageElement.alt = 'Great Britain blank grey map';
imgContainer.appendChild(imageElement);
container.appendChild(imgContainer);
}
IntensityView.prototype.styleAccordingToIndex = function(carbonIntensity, element){
const carbonIndex = carbonIntensity.index;
switch (carbonIndex) {
case "very low":
element.style.color = "#1E773C";
break;
case "low":
element.style.color = "#6DC68F";
break;
case "moderate":
element.style.color = "#ECBA10";
break;
case "high":
element.style.color = "#DD691B";
break;
case "very high":
element.style.color = "#97201C"
}
return carbonIndex;
};
IntensityView.prototype.createElement = function(elementType, text){
const element = document.createElement(elementType);
element.textContent = text;
return element;
};
module.exports = IntensityView;
<file_sep>const Regions = require('./models/regions.js');
const RegionsMenuView = require('./views/regions_menu_view.js');
const RegionInfoView = require('./views/region_info_view.js');
const MainView = require('./views/main_view.js');
const IntensityView = require('./views/intensity_view.js');
const PieChart = require('./views/high_charts/pie_chart.js');
document.addEventListener('DOMContentLoaded', () => {
console.log('JavaScript loaded');
const selectElement = document.querySelector('select#regions-dropdown');
const regionsMenu = new RegionsMenuView(selectElement);
regionsMenu.bindEvents();
const detailsContainer = document.querySelector('section#info-box');
const regionDetailsView = new RegionInfoView(detailsContainer);
regionDetailsView.bindEvents();
const mainContainer = document.querySelector('section#info-box');
const mainView = new MainView(mainContainer);
mainView.bindEvents();
const pieChart = new PieChart();
pieChart.bindEvents();
const intensityView = new IntensityView();
intensityView.bindEvents();
const regions = new Regions("https://api.carbonintensity.org.uk/regional");
// const regions = new Regions;
regions.getData();
regions.bindEvents();
});
<file_sep># Carbon Intensity in UK Regions (CIUR)

This is a front-end JavaScript web app that uses the official Carbon Intensity API for Great Britain, developed by National Grid, to request and display the data.
##### Built With
CIUR (Carbon Intensity in UK Regions) has been built with JavaScript as the main programming language using PubSub pattern. This app has now being deployed with Heroku, you can check it out in the following link:
<https://ciur.herokuapp.com>
## MVP
* The application should display data from an API request.
* The application should have a clear separation of concerns using a model and views.
## Extensions
* Take input from the user to update the page. You could update the page by filtering or manipulating the data on user interaction, or you might make further API request to load more data that is then displayed.
## Advance Extensions
* Look into a library to visualise the data.
HighCharts is an open-source library for rendering charts. You will need to use the library's documentation to integrate it into your application.
## Getting Started
The following steps will get you a copy of the project up and running on your local machine for development and testing purposes.
### Prerequisites
Before starting, a few packages need to be installed. For this, we will use npm, the package manager for JavaScript. First run npm init, then npm install the following:
* Webpack
* Webpack-cli
### Installing
First, let's initialise npm in the project folder.
```
npm init
```
Then, let's install Webpack, which is a static module bundler for modern JavaScript applications. Once it is installed, a configuration file is required.
```
npm install -D webpack webpack-cli
```
### Running
To get the app running, you need to add the following script to package.json.
```
"build": "webpack -w"
```
These are the commands needed to run the application on your local machine. In your command line, copy and paste the following:
```
npm run build
```
And, in a new tab, copy and paste this:
```
npm start
```
You can view the app running at port <http://localhost:3000>
<file_sep>const PubSub = require('../helpers/pub_sub.js');
// View that, when the regions data loads, extracts the single nation-wide
// entry from the API payload and republishes it for the intensity display.
const MainView = function (container) {
this.container = container;
};
MainView.prototype.bindEvents = function () {
PubSub.subscribe('Regions:regions-data-loaded', (evt) => {
// NOTE(review): regionsData is assigned without const/let, creating an
// implicit global -- should be declared locally.
regionsData = evt.detail;
const oneRegion = this.selectOneRegion(regionsData);
PubSub.publish('MainView:one-region-ready', oneRegion);
});
};
// NOTE(review): splice mutates the shared regions array in place and relies
// on the nation-wide entry living at hard-coded index 17 of the API
// response; other subscribers to 'Regions:regions-data-loaded' observe the
// shortened array. Consider looking the region up by shortname instead.
MainView.prototype.selectOneRegion = function(regions) {
const oneRegion = regions.splice(17, 1);
return oneRegion;
}
module.exports = MainView;
<file_sep>const PubSub = require('../helpers/pub_sub.js');
const IntensityView = require('./intensity_view.js');
const RegionInfoView = function (container) {
this.container = container;
};
RegionInfoView.prototype.bindEvents = function () {
PubSub.subscribe('Regions:region-clicked-ready', (event) => {
const regionDetails = event.detail;
this.render(regionDetails);
const dataFuels = this.chartifyDataFuels(regionDetails);
PubSub.publish('RegionInfoView:data-fuels-ready', dataFuels);
});
};
RegionInfoView.prototype.render = function (region) {
this.container.innerHTML = '';
const detailsContainer = this.createDetailContainer(region);
this.container.appendChild(detailsContainer);
}
RegionInfoView.prototype.createDetailContainer = function (region) {
const container = document.createElement('div');
container.classList.add('details-box');
const heading = this.createElement('h2', `${region.shortname}`);
container.appendChild(heading);
const carbonIntensity = this.createElement('h3', `${region.intensity.forecast} ` );
carbonIntensity.insertAdjacentHTML('beforeend', '<br><div><small>gCO<sub>2</sub>/kWh</small></div>');
this.styleAccordingToIndex(region, carbonIntensity);
container.appendChild(carbonIntensity);
const carbonIndexElement = this.createElement('h4', `${region.intensity.index}`);
// carbonIndexElement.setAttribute("id", "carbon-index");
this.styleAccordingToIndex(region, carbonIndexElement);
container.appendChild(carbonIndexElement);
return container;
};
RegionInfoView.prototype.styleAccordingToIndex = function(region, element){
const carbonIndex = region.intensity.index;
switch (carbonIndex) {
case "very low":
element.style.color = "#1E773C";
break;
case "low":
element.style.color = "#6DC68F";
break;
case "moderate":
element.style.color = "#ECBA10";
break;
case "high":
element.style.color = "#DD691B";
break;
case "very high":
element.style.color = "#97201C"
}
return carbonIndex;
};
// Converts a region's generation mix into Highcharts-ready pairs:
// [[fuelName, percentage], ...], omitting fuels that contribute 0%.
RegionInfoView.prototype.chartifyDataFuels = function (region) {
  const mix = region.generationmix;
  const pairs = [];
  for (const idx in mix) {
    const entry = mix[idx];
    if (entry.perc !== 0) {
      pairs.push([entry.fuel, entry.perc]);
    }
  }
  return pairs;
};
RegionInfoView.prototype.createElement = function (elementType, text) {
  // Small DOM helper: build an element of the given tag holding `text`.
  const node = document.createElement(elementType);
  node.textContent = text;
  return node;
};
module.exports = RegionInfoView;
<file_sep>const PubSub = require('../helpers/pub_sub.js');
// View wrapping the <select> menu used to pick a region.
const RegionsMenuView = function (selectElement) {
  this.selectElement = selectElement;
};
RegionsMenuView.prototype.bindEvents = function () {
  // Fill the <select> once the regions data has been fetched.
  // (The original captured populateMenu's return value, which is always
  // undefined, in an unused local — removed.)
  PubSub.subscribe('Regions:regions-data-loaded', (event) => {
    this.populateMenu(event.detail);
  });
  // Announce the chosen region's shortname whenever the selection changes.
  this.selectElement.addEventListener('change', (event) => {
    event.preventDefault();
    PubSub.publish('RegionsMenuView:region-clicked', event.target.value);
  });
};
RegionsMenuView.prototype.populateMenu = function (regionsData) {
  // Add one <option> per region; the "GB" aggregate entry is skipped
  // because it is not an individual region.
  regionsData.forEach((region) => {
    if (region.shortname === "GB") {
      return;
    }
    const option = document.createElement('option');
    option.textContent = region.shortname;
    option.value = region.shortname;
    this.selectElement.appendChild(option);
  });
};
module.exports = RegionsMenuView;
<file_sep>const RequestHelper = require('../helpers/request_helper.js');
const PubSub = require('../helpers/pub_sub.js');
// Model holding the data fetched from the carbonintensity.org.uk API.
const Regions = function () {
  this.regionsData = null;      // array of region objects (set by getData)
  this.carbonIntensity = null;  // current national intensity (set by getData)
};
Regions.prototype.getData = function () {
  // 1st request: regional breakdown (list of regions and their data).
  const requestHelper = new RequestHelper('https://api.carbonintensity.org.uk/regional');
  requestHelper.get()
    .then( (dataRegions) => {
      this.regionsData = dataRegions.data[0].regions;
      PubSub.publish('Regions:regions-data-loaded', this.regionsData);
    })
    .catch((err) => console.error('Regions: regional request failed', err));
  // 2nd request: current national carbon intensity.
  // (A stray debugging console.log was removed; both promises now have a
  // catch handler instead of producing unhandled rejections.)
  const newRequestHelper = new RequestHelper('https://api.carbonintensity.org.uk/intensity');
  newRequestHelper.get()
    .then( (carbonIntensity) => {
      this.carbonIntensity = carbonIntensity.data[0].intensity;
      PubSub.publish('Regions:carbon-intensity-ready', this.carbonIntensity);
    })
    .catch((err) => console.error('Regions: intensity request failed', err));
};
Regions.prototype.bindEvents = function () {
  // When the menu announces a selection, look the region up and publish
  // its full data object for the detail view.
  PubSub.subscribe('RegionsMenuView:region-clicked', (event) => {
    const found = this.findByRegionId(event.detail);
    PubSub.publish('Regions:region-clicked-ready', found);
  });
};
Regions.prototype.findByRegionId = function (searchId) {
  // Strict equality: both the menu's value and shortname are strings.
  // (The original used loose ==, which invites accidental coercion.)
  return this.regionsData.find((region) => region.shortname === searchId);
};
module.exports = Regions;
|
3a97c6e6250f06265f3cccc072d6e5f9c4110642
|
[
"JavaScript",
"Markdown"
] | 7 |
JavaScript
|
matosca/CIUR-JavaScript_app
|
5145ed82d2ba52cd7a38b27e802cb71657ae28c7
|
e9d784a0a147ed7bdd24898b144f83a9f34f950d
|
refs/heads/main
|
<file_sep># Password-generator
## Purpose
The application can be used to generate a random password based on criteria the user selects.
## Built With
* HTML
* CSS
* Javascript
### Link to deployed application: https://marisela-gutierrez.github.io/Password-generator/
## Contribution
Made with ❤️ by <NAME>
<file_sep>var numeric = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"];
// Character pools the password can be built from, plus working state.
var lowerCase = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z',];
var upperCase = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"];
// NOTE(review): "?" appears twice in this list, doubling its odds — confirm intentional.
var specialChar = ["!","@",":","#","$","%","^","&","*","_","?","?"];
var password = [];
// useChar accumulates the pools the user opted into via the confirm dialogs.
var useChar = [];
// Generate a password: prompt for length and criteria, then sample the
// allowed-character pool. The original never built or returned a password
// (writePassword received undefined); it now returns the generated string.
var generatePassword = function() {
  // Reset state so repeated clicks don't accumulate earlier criteria.
  useChar = [];
  // Enter password length (8-128)
  var length = passwordLength();
  // Choose password criteria (fills the useChar pool)
  passwordCriteria();
  // check that at least one criteria is selected (re-prompts otherwise)
  checkCriteria();
  // Build the password by sampling the pool `length` times.
  var result = "";
  for (var i = 0; i < length; i++) {
    result += useChar[Math.floor(Math.random() * useChar.length)];
  }
  return result;
};
// Prompt until the user supplies a numeric length between 8 and 128.
// BUG FIX: the original compared the raw prompt string, so non-numeric
// input such as "abc" escaped the loop; parseInt + isNaN closes that hole
// (parseInt of null or "" is NaN, preserving the old re-prompt behavior).
var passwordLength = function() {
  var length = NaN;
  while (isNaN(length) || length < 8 || length > 128) {
    length = parseInt(prompt("Please enter a password length between 8 and 128 characters."), 10);
  }
  console.log("Your password character length is " + length);
  return length;
};
// Ask the user which character classes to include and extend the useChar
// pool accordingly. NOTE(review): lowerCaseChar/upperCaseChar/numericChar/
// specialCharr are assigned without var, creating implicit globals that
// checkCriteria() later reads — confirm this coupling is intended.
var passwordCriteria = function() {
  // Choose lowercase
  lowerCaseChar =
  window.confirm("Would you like lowercase characters in your password?");
    if (lowerCaseChar) {
      useChar = useChar.concat(lowerCase);
    }
  //Choose uppercase
  upperCaseChar =
  window.confirm("Would you like uppercase characters in your password?");
    if (upperCaseChar) {
      useChar = useChar.concat(upperCase);
    }
  //Choose numeric
  numericChar =
  window.confirm("Would you like numeric characters in your password?");
    if (numericChar) {
      useChar = useChar.concat(numeric);
    }
  //choose special characters
  specialCharr =
  window.confirm("Would you like special characters in your password?");
    if (specialCharr) {
      useChar = useChar.concat(specialChar);
    }
};
// Verify that at least one character class was selected; otherwise alert,
// re-prompt and validate again. BUG FIX: the original re-prompted once but
// never re-checked, so a second empty selection slipped through.
var checkCriteria = function() {
  if (
    lowerCaseChar === true ||
    upperCaseChar === true ||
    numericChar === true ||
    specialCharr === true
  ){
    randomPassword();
  }
  else {
    window.alert("At least one character type should be selected. Please try again.");
    passwordCriteria();
    checkCriteria();
  }
};
// Pick one random character from the pool of allowed characters.
// BUG FIX: the original indexed the passwordCriteria *function* instead of
// the useChar array, so it always returned undefined.
var randomPassword = function() {
  return useChar[Math.floor(Math.random() * useChar.length)];
};
// The previous top-level generatePassword() call was removed: it prompted
// the user as soon as the script loaded and discarded the result. Password
// generation now happens only via the generate button's click handler.
// Get references to the #generate element
var generateBtn = document.querySelector("#generate");
// Write password to the #password input
// Click handler: stores the result of generatePassword() into the
// read-only #password field.
// NOTE(review): confirm generatePassword() actually returns the password
// string; as written above it has no return statement.
function writePassword() {
  var password = generatePassword();
  var passwordText = document.querySelector("#password");
  passwordText.value = password;
}
// Add event listener to generate button
generateBtn.addEventListener("click", writePassword);
|
2d84018ff4ec80b26afa12894bda7906e8debbe2
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
marisela-gutierrez/Password-generator
|
262858c09c740e8ea10777ce7a514da448cd4ca1
|
3001411fb470c858079fdebcb2ff236b72c71498
|
refs/heads/main
|
<file_sep>const express = require("express");
const mongoose = require("mongoose");
const cors = require("cors");
// Connect to the local MongoDB database backing the grocery list.
// The callback fires once the initial connection attempt completes.
// (Fixed the "Sever" typo in the startup log message.)
mongoose.connect('mongodb://localhost:27017/capstone-project-1', { useNewUrlParser: true , useUnifiedTopology: true }, () =>
{
    console.log("Server connected check localhost:5000");
})
// Shape of a single grocery-list entry.
const grocerySchema = new mongoose.Schema({
    groceryItem: String,   // display name of the item
    isPurchased: Boolean,  // whether the item has been bought
});
const Grocery = mongoose.model("grocery", grocerySchema);
const app = express();
app.use(cors());
app.use(express.json());
// Create a new grocery item from the request body; responds with the
// saved document.
app.post("/grocery/add", async (req, res) =>{
    const grocery = new Grocery ({
        groceryItem: req.body.groceryItem,
        isPurchased:req.body.isPurchased,
    })
    try{
        const data= await grocery.save()
        res.json(data)
    }catch(err){
        // Signal failure with a 500 instead of a 200 "Error" body.
        res.status(500).send('Error')
    }
});
// Return every grocery item in the collection.
app.get("/grocery/getAll", async (req, res) => {
    const allGroceryItems = await Grocery.find({})
    res.json({ allGroceryItems });
});
// Update the purchase status of an existing grocery item (id in body).
app.put("/grocery/updatePurchaseStatus", async (req, res) =>{
    const id = req.body._id;
    try{
        const grocery = await Grocery.findById(id)
        // A missing id previously surfaced as a generic 200 "Error";
        // report it explicitly.
        if (!grocery) {
            return res.status(404).send({result: 'Item not found'})
        }
        grocery.isPurchased = req.body.isPurchased
        await grocery.save()
        res.send({result: 'Item updated successfully'})
    }catch(err){
        // Signal failure with a 500 instead of a 200 "Error" body.
        res.status(500).send('Error')
    }
});
// Delete the grocery item whose _id is supplied in the request body.
// NOTE(review): findByIdAndRemove is deprecated in newer Mongoose
// releases — confirm the installed version still supports it.
app.delete("/grocery/deleteGroceryItem", async (req,res) => {
    const id =req.body._id;
    await Grocery.findByIdAndRemove(id).exec();
    res.send({result: 'Item deleted successfully'});
});
const db = mongoose.connection;
db.once("open", () => {
app.listen(5000);
});<file_sep>To run this Project:
1. Extract the file
backend:
2. Open cmd from "vaersolutions" folder
3. run "yarn install"
4. run "yarn start"
frontend:
5. Open cmd from "frontend" folder
6. run "yarn install"
7. run "yarn start"
Application will open from the "http://localhost:3000/"
Thank You<file_sep>To run this App follow the steps:
1. Open cmd from backend folder and run "npm install".
2. After npm install run "npm start".
3. Server will start at localhost:5000.
4. Open cmd from frontend folder and run "npm install".
5. And then run "npm start".
6. Application will open automatically at localhost:3000.
|
48ba9e0d0564f8fdf4c399410119d1360701e040
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
Prakash-Palanisamy/GroceryList
|
7c47ad5cb095a4703349e59ab9abe85cfadf4506
|
2a1c60b95934b0e047c6900af2e1fa3f4877233c
|
refs/heads/master
|
<repo_name>MWilliams15/wpp-testing<file_sep>/UserInterface/src/main/resources/static/js/Login/Error.js
$(document).ready(function(){
$('#goBack').on('click', function(){
window.location = './';
});
});<file_sep>/UserInterface/src/main/resources/static/js/Admin/ManageUsers.js
$(document).ready(function(){
function populateDataTable(data){
var table = $('#userTable');
table.DataTable({
data:data.userList,
columns: [
{title: "<NAME>",data:"givenName"},
{title: "Surname",data:"surname"},
{title: "Position",data:"userRole"}
],
});
}
$.ajax({
url:'/Admin/GetUsersList',
type:'GET',
success: function(data){
console.log(data);
populateDataTable(data);
},
error: function(){
}
});
});<file_sep>/AdministrationService/settings.gradle
rootProject.name = 'AdministrationService'
include ':Contracts'
project(':Contracts').projectDir = new File('../Contracts')
<file_sep>/AdministrationService/src/main/java/com/workplacementplatform/services/administrationservice/DemoApplicationAdministrationWebServiceApplication.java
package com.workplacementplatform.services.administrationservice;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
// Spring Boot entry point for the Administration web service.
@SpringBootApplication
public class DemoApplicationAdministrationWebServiceApplication {
	public static void main(String[] args) {
		// Boots the embedded server and the Spring application context.
		SpringApplication.run(DemoApplicationAdministrationWebServiceApplication.class, args);
	}
}
<file_sep>/UserInterface/src/main/java/com/workplacementplatform/userinterface/models/LoginModel.java
package com.workplacementplatform.userinterface.models;
public class LoginModel{
private String userName;
private String password;
public String getPassword() {
return password;
}
public void setPassword(String aPassword) {
password = aPassword;
}
public String getUserName() {
return userName;
}
public void setUserName(String aUserName) {
userName = aUserName;
}
}<file_sep>/Contracts/build.gradle
buildscript {
repositories { mavenCentral() }
}
ext { springBootVersion = '2.0.1.RELEASE' }
apply plugin: 'java'
apply plugin: 'eclipse'
jar {
baseName = 'com.workplacementplatform_poc.contracts'
}
sourceCompatibility = 1.8
repositories { mavenCentral() }
<file_sep>/UserInterface/bin/main/application.properties
server.port=${PORT:8080}
services.administration.url=http://localhost:8090/
|
649dca4ae3c1037e19956e9118c6ff72f755978b
|
[
"JavaScript",
"Java",
"INI",
"Gradle"
] | 7 |
JavaScript
|
MWilliams15/wpp-testing
|
6f86c7dff5c58a2137df19bdc5a205c4cb42d6ee
|
d0c32b3d598b5b81e17b670e2d42ee918e25427e
|
refs/heads/main
|
<repo_name>rahulddutta/mern-blog<file_sep>/client/src/components/Sidebar/Sidebar.jsx
import axios from "axios"
import { useEffect, useState } from "react"
import { Link } from "react-router-dom"
import "./sidebar.css"
// Blog sidebar: an "about" blurb, the list of post categories (fetched
// from the API once on mount), and a row of social-media icons.
const Sidebar = () => {
  // Category objects loaded from GET /categories.
  const [cats,setCats] = useState([])
  useEffect(() => {
    // Fetch the category list once when the component first mounts.
    const getCats = async () => {
      const res = await axios.get("/categories")
      setCats(res.data)
    }
    getCats()
  },[])
  return (
    <div className="sidebar">
      <div className="sidebarItem">
        <span className="sidebarTitle">ABOUT ME</span>
        <img
          className="sidebarImg"
          src="https://cdn.pixabay.com/photo/2021/08/22/15/39/kid-6565461__340.jpg"
          alt="profile"
        />
        <p>
          Lorem ipsum dolor sit amet, consectetur adipisicing elit. Illo,
          ratione. Lorem ipsum, dolor sit amet consectetur adipisicing elit.
          Consequuntur, blanditiis?
        </p>
      </div>
      <div className="sidebarItem">
        <span className="sidebarTitle">CATEGORIES</span>
        <ul className="sidebarList">
          {/* NOTE(review): each Link produced by this map needs a unique
              "key" prop to avoid React reconciliation warnings. */}
          {cats.map((c) => (
            <Link to={`/?cat=${c.name}`} className="link">
              <li className="sidebarListItem">{c.name}</li>
            </Link>
          ))}
        </ul>
      </div>
      <div className="sidebarItem">
        <span className="sidebarTitle">FOLLOW US</span>
        <div className="sidebarSocial">
          <i className=" sidebarIcon fab fa-facebook-messenger"></i>
          <i className="sidebarIcon fab fa-twitter"></i>
          <i className="sidebarIcon fab fa-instagram"></i>
        </div>
      </div>
    </div>
  )
}
export default Sidebar
|
722ac05e60e8320454d6dd69298b0ec7988c8e85
|
[
"JavaScript"
] | 1 |
JavaScript
|
rahulddutta/mern-blog
|
c160a36588608fdaa4bfd5d6ff65a0acf44c2765
|
07a5f777c29110264fd572b52cd04930279b14d9
|
refs/heads/master
|
<file_sep>Challenges = new Meteor.Collection('challenges');
Challenges.allow({
insert: function() {
return true;
},
remove: function() {
return true;
}
});<file_sep>#Programmaverse
>"Meteor is an ultra-simple environment for building modern websites. What once took weeks, even with the best tools, now takes hours with Meteor." — [Meteor](http://docs.meteor.com/#/full/structuringyourapp)
###Awesome!
If you want to build upon a solid open source project that helps you get things done, give it a try. Gone are the old days of endless experimentation: we want to deliver a tool that helps people create value.
##Collaborating
We're active on [Gitter](https://gitter.im/randomhackers?utm_source=share-link&utm_medium=link&utm_campaign=share-link). If you want to chat or collaborate, reach us there.
#Start Here!
Download and [install](https://www.meteor.com/install) Meteor (Be patient!)
curl https://install.meteor.com/ | sh
Clone this repo, branch and run the app!
git clone https://github.com/randomhackers/Programmaverse
cd Programmaverse
git branch --set-upstream-to=origin/meteor-app meteor-app
git checkout -b <your-branch>
meteor
###Learn the basics!
Understand the [app structure](http://docs.meteor.com/#/full/structuringyourapp).
###Do your stuff!
###Commit and merge.
git commit -m "A nice message"
git checkout meteor-app
git merge <your-branch>
If you're a Global Hackathon team member, push your changes.
git push origin meteor-app
We're already working with a few meteor [packages]().
You can install and remove packages with
meteor remove <package_name> [...]
meteor add <package_name> [...]
* [Less](http://docs.meteor.com/#/full/less)
* [Nemo64:Bootstrap](https://github.com/Nemo64/meteor-bootstrap)
* [accounts-password](http://docs.meteor.com/#/full/accounts_passwords)
* [accounts-base](http://docs.meteor.com/#/full/accountsui)
##Other Resources
* [Meteor docs adding user accounts](https://www.meteor.com/try/9)
* [Color pallet](http://www.color-hex.com/color/3399cc)
* [Meteor docs](http://docs.meteor.com/#/full/)
* [Bootstrap docs](http://getbootstrap.com/css/)
* [Bootstrap the Meteor way](http://www.manuel-schoebel.com/blog/meteorjs-and-twitter-bootstrap---the-right-way)
* [Meteor user authentication tutorial](http://help.nitrous.io/meteor-app/)
* [Meteor docs session/user authentication API](http://docs.meteor.com/#/full/session_equals)
* [Tutorial for customizing account-ui](http://blog.benmcmahen.com/post/41741539120/building-a-customized-accounts-ui-for-meteor)
<file_sep>Template.body.events({
'click button[name="logout"]' : function(e, t) {
e.preventDefault();
Meteor.logout();
Session.set('isLoggedIn', false);
Session.set('currentView', 'home');
return false;
},
'click a[href="challenges"]' : function(e, t) {
e.preventDefault();
Session.set('currentView', 'challenges');
return false;
},
'click a[href="home"]' : function(e, t) {
e.preventDefault();
Session.set('currentView', 'home');
return false;
}
});
Template.body.helpers({
hasAccount: function(){
return Session.get("hasAccount");
},
isLoggedIn: function(){
return Session.get("isLoggedIn");
},
view: function() {
return Session.get('currentView');
}
})
<file_sep>Meteor.startup(function(){
Session.setDefault('currentView', 'home')
})<file_sep>Template.dashboard.helpers({
email: function() {
var user = Meteor.user();
return user.emails[0]['address'];
}
})<file_sep>if (Meteor.isClient) {
Template.challenges.helpers({
challenges: function() {
return Challenges.find({});
}
});
Template.challenges.events({
'click span': function() {
// I'm about to reach there.
}
})
}
|
05b11177ffe6b5d649ed6bb9c9babfae08dcaf61
|
[
"JavaScript",
"Markdown"
] | 6 |
JavaScript
|
randomhackers/programmaverse-meteor
|
37103c2f8093d1c1edb4696f8ff0f76019cc2edd
|
16e636acbfc7072fe5ad8c6b7a284b7572598335
|
refs/heads/master
|
<repo_name>stevekinney/trance-leg<file_sep>/README.md
# Trance Leg
A very important library for working with rectangles.
## API
Trance Leg takes four arguments: `x`, `y`, `width`, and `height`.
It also has two methods:
- `area`: returns the area of the rectangle
- `perimeter`: returns the perimeter of the rectangle
<file_sep>/src/rectangle.js
class Rectangle {
constructor(x, y, width, height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
get perimeter() {
return (this.width * 2) + (this.height * 2);
};
get area() {
return this.width * this.height;
};
}
module.exports = Rectangle;
|
9fa1ac29d318c4398d012f86bb15e855c9f3dd90
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
stevekinney/trance-leg
|
508650f7d6bf877e0a86c4a5786822801ac73a42
|
1bbf50d03eb8577238d986e8b4581ff242278e0b
|
refs/heads/master
|
<file_sep>import re
def paper_and_ribbon(l, w, h):
    """Return (paper, ribbon) for one l x w x h present (AoC 2015 day 2).

    Paper is the total surface area plus the area of the smallest face
    (slack); ribbon is the smallest face's perimeter plus the box volume
    (for the bow). Replaces the original's six-way ordering chain with a
    sort: the smallest face is always the product of the two smallest sides.
    """
    small, mid = sorted((l, w, h))[:2]
    surface = 2 * (l * w + w * h + h * l)
    return surface + small * mid, 2 * (small + mid) + l * w * h


if __name__ == '__main__':
    total = 0
    ribon = 0
    with open('fabric.txt') as f:
        for line in f:
            # Each line holds dimensions like "23x4x15" (1-2 digits each).
            m = re.search(r"(\d{1,2})x(\d{1,2})x(\d{1,2})", line.rstrip())
            if m:
                l, w, h = (int(g) for g in m.groups())
                paper, ribbon = paper_and_ribbon(l, w, h)
                total += paper
                ribon += ribbon
    print(total)
    print(ribon)
|
95f156a18adfa38856e3dea0a14903d1a9d2d87b
|
[
"Python"
] | 1 |
Python
|
Czajnikus/AdventOfCode2015-Day2
|
cfa9cf8d57e90e1f7a49545e4699ccae58bdb6a3
|
a3894da3aece8d7ddee1ddcb8ed9cbb8f6377bce
|
refs/heads/master
|
<repo_name>bit1120161907/shuduku09<file_sep>/数独作业(含源代码)/ConsoleApplication11/ConsoleApplication11.cpp
#include "stdafx.h"
int sudo[9][9] = { //构造新数独棋盘的原始棋盘
{ 1, 6, 2, 5, 9, 3, 4, 7, 8 },
{ 5, 9, 7, 1, 8, 4, 6, 3, 2 },
{ 4, 8, 3, 2, 6, 7, 1, 9, 5 },
{ 6, 3, 5, 7, 1, 8, 9, 2, 4 },
{ 8, 7, 9, 6, 4, 2, 3, 5, 1 },
{ 2, 4, 1, 3 ,5 ,9 ,7 ,8, 6 },
{ 9, 2 ,6 ,4, 7, 5, 8, 1, 3 },
{ 7, 5, 4, 8, 3, 1, 2, 6, 9 },
{ 3, 1, 8, 9, 2, 6, 5, 4, 7 }
};
// Recursive backtracking over the blank cells listed in dgt (dgtc of them
// remain). tei/tej/tek count, per row / column / 3x3 box, how many times
// each digit 1..9 is currently placed; a digit is legal in a cell when all
// three counters are zero. Every completed board is appended to shuduku.txt.
// BUG FIXES vs. the original: the solution was written when one blank was
// still unfilled and the recursion then indexed dgt[-1]; the file was
// opened with "w" (truncating earlier solutions) and never closed; the
// availability test ("== 1", mark with "= 2") contradicted the counting
// convention solute() initialises.
int check(int a[10][10], int tei[9][10], int tej[9][10], int tek[9][10], int dgt[50][3], int dgtc)
{
	if (dgtc == 0)
	{
		// No blanks left: append this solution to the output file.
		FILE *fp;
		errno_t err;
		if ((err = fopen_s(&fp, "shuduku.txt", "a")) != 0) { /* open file */
			printf("File open error!\n");
			exit(0);
		}
		for (int r = 0; r < 9; r++)
		{
			for (int c = 0; c < 9; c++)
				fprintf_s(fp, "%d ", a[r][c]);
			fprintf_s(fp, "\n");
		}
		fprintf_s(fp, "\n");
		fclose(fp);
		return 0;
	}
	dgtc--;
	int i = dgt[dgtc][0];
	int j = dgt[dgtc][1];
	// 3x3 box index of cell (i, j).
	int k = (i / 3) * 3 + j / 3;
	for (int s = 1; s < 10; s++)
	{
		// Usable when the digit appears nowhere in the row, column or box.
		if (tei[i][s] == 0 && tej[j][s] == 0 && tek[k][s] == 0)
		{
			a[i][j] = s;
			tei[i][s]++;
			tej[j][s]++;
			tek[k][s]++;
			check(a, tei, tej, tek, dgt, dgtc);
			// Undo and try the next candidate (enumerates every solution).
			a[i][j] = 0;
			tei[i][s]--;
			tej[j][s]--;
			tek[k][s]--;
		}
	}
	return 0;
}
// Solve one 9x9 sudoku grid in `a` (0 = blank) by backtracking.
// tei/tej/tek count, per row / column / 3x3 box, how many times each digit
// 1..9 is already placed; dgt lists the coordinates of the blank cells.
void solute(int a[10][10])
{
	// (The original also opened shuduku.txt here with mode "w", which
	// truncated the output file and leaked the handle; check() manages the
	// output file itself, so that open was removed.)
	int dgt[50][3];
	int dgtc = 0;
	int tei[9][10];
	int tej[9][10];
	int tek[9][10];
	// BUG FIX: the original memset used sizeof(tei[0][0]) (a single int),
	// leaving almost the whole of each counter array uninitialised.
	memset(tei, 0, sizeof(tei));
	memset(tej, 0, sizeof(tej));
	memset(tek, 0, sizeof(tek));
	for (int i = 0; i < 9; i++)
	{
		for (int j = 0; j < 9; j++)
		{
			if (a[i][j] == 0)
			{
				// Remember this blank cell for the backtracking search.
				dgt[dgtc][0] = i;
				dgt[dgtc][1] = j;
				dgtc++;
			}
			else
			{
				// Count the pre-filled digit in its row, column and box.
				tei[i][a[i][j]]++;
				tej[j][a[i][j]]++;
				int k = (i / 3) * 3 + j / 3;
				tek[k][a[i][j]]++;
			}
		}
	}
	check(a, tei, tej, tek, dgt, dgtc);
}
// Usage:
//   program -c N      generate N sudoku boards into shuduku.txt
//   program -s FILE   solve the puzzles read from FILE (9 digit rows each,
//                     boards separated by a blank line)
int main(int argc, char* argv[])
{
	FILE *fp;
	errno_t err;
	if ((err = fopen_s(&fp, "shuduku.txt", "w")) != 0) { /* open output file */
		printf("File open error!\n");
		exit(0);
	}
	else
		printf("The file 'shuduku.txt' was opened\n");
	if (strcmp(argv[1], "-c") == 0)
	{
		// Parse the requested board count (digits only) from argv[2].
		int cnt = 0;
		int n = strlen(argv[2]);
		for (int te = 0; te < n; te++) cnt += (argv[2][te] - '0') * pow(10, n - te - 1);
		printf("%d", cnt);
		int shift[9] = { 0, 3, 6, 1, 4, 6, 2, 5, 8 };
		char row[10] = "812345679";
		// Enumerate boards by permuting the digit relabelling (row) and the
		// row-band ordering (shift) of the seed grid `sudo`.
		for (int k = 0; k < 40320 && cnt; k++)
		{
			if (k) next_permutation(row + 1, row + 9);
			for (int i = 0; i < 6 && cnt; i++)
			{
				if (i)
				{
					next_permutation(shift + 3, shift + 6);
					shift[6] = 2, shift[7] = 5, shift[8] = 8;
				}
				for (int j = 0; j < 6 && cnt; j++)
				{
					if (j) next_permutation(shift + 6, shift + 9);
					for (int r = 0; r < 9; r++)
					{
						for (int c = 0; c < 9; c++)
							fprintf_s(fp, "%c ", row[sudo[shift[r]][c] - 1]);
						fprintf_s(fp, "\n");
					}
					cnt--;
					fprintf_s(fp, "\n");
				}
			}
		}
	}
	if (strcmp(argv[1], "-s") == 0)
	{
		// BUG FIX: the puzzle file must be opened for *reading*; the
		// original used mode "w", truncating the input. A separate handle
		// also stops the outer fp from being shadowed and leaked.
		FILE *fin;
		if ((err = fopen_s(&fin, argv[2], "r")) != 0) { /* open input file */
			printf("File open error!\n");
			exit(0);
		}
		char CmdLine[32];
		int temsodu[10][10];
		int j = 0;
		// BUG FIX: the %[ conversion of fscanf_s requires the buffer size
		// as an extra argument.
		while (fscanf_s(fin, "%[^\n]", CmdLine, (unsigned)sizeof(CmdLine)) != EOF)
		{
			fgetc(fin);
			for (int i = 0; i < 9; i++)
			{
				// BUG FIX: the original subtracted the multi-character
				// constant '48'; digits convert with - '0'.
				temsodu[j][i] = CmdLine[i] - '0';
			}
			j++;
			if (j == 9)
			{
				// Skip the blank separator line, then solve the board.
				fgetc(fin);
				j = 0;
				solute(temsodu);
			}
		}
		fclose(fin);
	}
	if (fclose(fp)) { /* close output file */
		printf("Can not close the file!\n");
		exit(0);
	}
	return 0;
}
|
859b714980a87dd33b480fa7c8c75b1eacc533d6
|
[
"C++"
] | 1 |
C++
|
bit1120161907/shuduku09
|
49937d5996f045dde24a65465d698b4943be7187
|
1e0d0cd401291fb1137cdd8577ba7fbec2a74218
|
refs/heads/master
|
<file_sep>from sklearn.linear_model import SGDClassifier
from skmultiflow.core import BaseSKMObject, ClassifierMixin, MetaEstimatorMixin, MultiOutputMixin
class MultiOutputLearner(BaseSKMObject, ClassifierMixin, MetaEstimatorMixin, MultiOutputMixin):
    """Multi-output learner re-declared from scikit-multiflow's mixins with a
    project default base estimator (SGDClassifier, 100 iterations).

    NOTE(review): this subclasses the mixins directly rather than the real
    MultiOutputLearner; confirm BaseSKMObject.__init__ accepts base_estimator.
    """
    def __init__(self, base_estimator=SGDClassifier(max_iter=100)):
        # NOTE(review): the default estimator instance is created at import
        # time and shared by every instance that relies on the default.
        super().__init__(base_estimator)
<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.online_csb2 import OnlineCSB2
class OnlineCSB2(OnlineCSB2):
    """Thin wrapper around scikit-multiflow's OnlineCSB2 that pins the
    project-wide default hyper-parameters (KNNAdwin base estimator).
    """
    def __init__(self,
                 base_estimator=KNNAdwin(),
                 n_estimators=10,
                 cost_positive=1,
                 cost_negative=0.1,
                 drift_detection=True,
                 random_state=None):
        super().__init__(base_estimator,
                         n_estimators,
                         cost_positive,
                         cost_negative,
                         drift_detection,
                         random_state)
from skmultiflow.meta.additive_expert_ensemble import AdditiveExpertEnsemble
class AdditiveExpertEnsemble(AdditiveExpertEnsemble):
    """Thin wrapper around scikit-multiflow's AdditiveExpertEnsemble with
    the project default base estimator (NaiveBayes).
    """
    def __init__(self, n_estimators=5, base_estimator=NaiveBayes(), beta=0.8,
                 gamma=0.1, pruning='weakest'):
        """
        Creates a new instance of AdditiveExpertEnsemble.
        """
        super().__init__(n_estimators, base_estimator, beta,
                         gamma, pruning)
<file_sep>from sklearn.tree import DecisionTreeClassifier
from skmultiflow.meta.batch_incremental import BatchIncremental
class BatchIncremental(BatchIncremental):
    """Thin wrapper around scikit-multiflow's BatchIncremental with the
    project default base estimator (DecisionTreeClassifier).
    """
    def __init__(self, base_estimator=DecisionTreeClassifier(), window_size=100, n_estimators=100):
        super().__init__(base_estimator, window_size, n_estimators)
<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.oza_bagging import OzaBagging
class OzaBagging(OzaBagging):
    """Thin wrapper around scikit-multiflow's OzaBagging with the project
    default base estimator (KNNAdwin).
    """
    def __init__(self, base_estimator=KNNAdwin(), n_estimators=10, random_state=None):
        super().__init__(base_estimator, n_estimators, random_state)
<file_sep>from skmultiflow.drift_detection.eddm import EDDM
class EDDM(EDDM):
"""
come form skmultiflow
"""
def __init__(self):
super().__init__()
<file_sep>from sklearn.linear_model import SGDRegressor
from skmultiflow.meta.regressor_chains import RegressorChain
class RegressorChain(RegressorChain):
    """Thin wrapper around scikit-multiflow's RegressorChain with the
    project default base estimator (SGDRegressor).
    """
    def __init__(self, base_estimator=SGDRegressor(), order=None, random_state=None):
        super().__init__(base_estimator, order, random_state)
<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.oza_bagging_adwin import OzaBaggingAdwin
class OzaBaggingAdwin(OzaBaggingAdwin):
    """Thin wrapper around scikit-multiflow's OzaBaggingAdwin with the
    project default base estimator (KNNAdwin).
    """
    def __init__(self, base_estimator=KNNAdwin(), n_estimators=10, random_state=None):
        super().__init__(base_estimator, n_estimators, random_state)
<file_sep>from math import *
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class HDDM_A(BaseDriftDetector):
    def __init__(self, drift_confidence=0.001, warning_confidence=0.005, two_side_option=True):
        """Drift detector based on Hoeffding bounds (HDDM, A-test variant).

        drift_confidence: confidence for signalling a concept drift.
        warning_confidence: confidence for entering the warning zone.
        two_side_option: also monitor decreases of the monitored mean.
        """
        super().__init__()
        super().reset()
        # *_min / *_max track the sub-windows with the lowest / highest
        # observed mean; total_* accumulate every sample seen so far.
        self.n_min = 0
        self.c_min = 0
        self.total_n = 0
        self.total_c = 0
        self.n_max = 0
        self.c_max = 0
        self.n_estimation = 0
        self.c_estimation = 0
        self.drift_confidence = drift_confidence
        self.warning_confidence = warning_confidence
        self.two_side_option = two_side_option
def add_element(self, prediction):
""" Add a new element to the statistics
Parameters
----------
prediction: int (either 0 or 1)
This parameter indicates whether the last sample analyzed was
correctly classified or not. 1 indicates an error (miss-classification).
Notes
-----
After calling this method, to verify if change was detected or if
the learner is in the warning zone, one should call the super method
detected_change, which returns True if concept drift was detected and
False otherwise.
"""
self.total_n += 1
self.total_c += prediction
if self.n_min == 0:
self.n_min = self.total_n
self.c_min = self.total_c
if self.n_max == 0:
self.n_max = self.total_n
self.c_max = self.total_c
cota = sqrt(1.0 / (2 * self.n_min) * log(1.0 / self.drift_confidence))
cota1 = sqrt(1.0 / (2 * self.total_n) * log(1.0 / self.drift_confidence))
if self.c_min / self.n_min + cota >= self.total_c / self.total_n + cota1 :
self.c_min = self.total_c
self.n_min = self.total_n
cota = sqrt(1.0 / (2 * self.n_max) * log(1.0 / self.drift_confidence))
if self.c_max / self.n_max - cota <= self.total_c / self.total_n - cota1:
self.c_max = self.total_c
self.n_max = self.total_n
if self._mean_incr(self.c_min, self.n_min, self.total_c, self.total_n, self.drift_confidence):
self.n_estimation = self.total_n - self.n_min
self.c_estimation = self.total_c - self.c_min
self.n_min = self.n_max = self.total_n = 0
self.c_min = self.c_max = self.total_c = 0
self.in_concept_change = True
self.in_warning_zone = False
elif self._mean_incr(self.c_min, self.n_min, self.total_c, self.total_n, self.warning_confidence):
self.in_concept_change = False
self.in_warning_zone = True
else:
self.in_concept_change = False
self.in_warning_zone = False
if self.two_side_option and self._mean_decr(self.c_max, self.n_max, self.total_c, self.total_n) :
self.n_estimation = self.total_n - self.n_max
self.c_estimation = self.total_c - self.c_max
self.n_min = self.n_max = self.total_n = 0
self.c_min = self.c_max = self.total_c = 0
self._update_estimations()
    def _mean_incr(self, c_min, n_min, total_c, total_n, confidence):
        # Hoeffding-bound test: has the overall mean risen significantly
        # above the lowest-window mean at the given confidence level?
        if n_min == total_n:
            return False
        m = (total_n - n_min) / n_min * (1.0 / total_n)
        cota = sqrt(m / 2 * log(1.0 / confidence))
        return total_c / total_n - c_min / n_min >= cota
    def _mean_decr(self, c_max, n_max, total_c, total_n):
        # Mirror of _mean_incr: has the overall mean dropped significantly
        # below the highest-window mean (uses drift_confidence)?
        if n_max == total_n:
            return False
        m = (total_n - n_max) / n_max * (1.0 / total_n)
        cota = sqrt(m / 2 * log(1.0 / self.drift_confidence))
        return c_max / n_max - total_c / total_n >= cota
    def reset(self):
        """ reset
        Resets the change detector parameters.
        """
        super().reset()
        # Clear all running window statistics back to the initial state.
        self.n_min = 0
        self.c_min = 0
        self.total_n = 0
        self.total_c = 0
        self.n_max = 0
        self.c_max = 0
        self.c_estimation = 0
        self.n_estimation = 0
def _update_estimations(self):
""" update_estimations
Update the length estimation and delay.
"""
if self.total_n >= self.n_estimation:
self.c_estimation = self.n_estimation = 0
self.estimation = self.total_c / self.total_n
self.delay = self.total_n
else:
self.estimation = self.c_estimation / self.n_estimation
self.delay = self.n_estimation<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.online_boosting import OnlineBoosting
class OnlineBoosting(OnlineBoosting):
    """Thin wrapper around scikit-multiflow's OnlineBoosting with the
    project default base estimator (KNNAdwin).
    """
    def __init__(self, base_estimator=KNNAdwin(), n_estimators=10, drift_detection=True, random_state=None):
        super().__init__(base_estimator, n_estimators, drift_detection, random_state)
<file_sep>import numpy as np
import math
from .base_distribution_detector import BaseDistributionDetector
class LSDDINC(BaseDistributionDetector):
    def __init__(self, train_size=400, window1_size=200, window2_size=200, u=0.01, bootstrap_num=2000):
        """Incremental LSDD (least-squares density difference) change detector.

        train_size: instances used to fit sigma/lambda and the threshold.
        window1_size / window2_size: reference and sliding window sizes.
        u: upper tail mass used when bootstrapping the threshold t1.
        bootstrap_num: number of bootstrap resamples for the threshold.
        """
        super().__init__()
        self.n = window1_size
        self.m = window2_size
        self.bootstrap_num = bootstrap_num
        self.n_t = train_size
        self.u = u
        self.sigma = None   # Gaussian kernel width (set by get_sigma)
        self.lambd = None   # ridge regulariser (set by get_lambda)
        self.window_slide = [None for _ in range(self.m)]
        self.window_train = [None for _ in range(self.n_t)]
        self.centers = None
        self.win_sli_i = None
        self.win_tra_i = None
        self.i = None
        self.t1 = None      # detection threshold (set by the bootstrap)
        self.Hl = None
        self.h = None
        self.reset()
    def reset(self):
        # Restart the detector: drops the fitted kernel parameters,
        # threshold and window indices (the windows refill from scratch).
        super().reset()
        self.sigma = None
        self.lambd = None
        self.t1 = None
        self.Hl = None
        self.h = None
        self.centers = None
        self.win_sli_i = 0
        self.win_tra_i = 0
        self.i = 0
def add_element(self, input_value):
if self.in_concept_change:
self.reset()
input_value = np.asarray(input_value)
if input_value.ndim != 1:
raise ValueError("X should has one dimension")
# return
if self.win_tra_i < self.n_t:
self.window_train[self.win_tra_i] = input_value
self.win_tra_i += 1
if self.win_tra_i == self.n_t:
self.training()
return
if self.i < self.m:
self.window_slide[self.win_sli_i] = input_value
self.win_sli_i = (self.win_sli_i + 1) % self.m
self.i += 1
if self.i == self.m:
self.get_Hl_and_h()
self.centers = np.append(np.asarray(self.window_train), np.asarray(self.window_slide), axis=0)
return
# slide window_slide
self.h -= self.get_dia(x_new=input_value, x_old=self.window_slide[self.win_sli_i])
d = self.h.T.dot(self.Hl).dot(self.h)
if d > self.t1:
self.in_concept_change = True
self.window_slide[self.win_sli_i] = input_value
self.win_sli_i = (self.win_sli_i + 1) % self.n
    def get_dia(self, x_new, x_old):
        # Incremental update term for h when x_old leaves the sliding
        # window and x_new enters: (k(c, x_new) - k(c, x_old)) / m for
        # every kernel centre c, returned as a column vector.
        get_h_i_vec = np.vectorize(self.get_h_i, signature='(n),(n)->()')
        dia = get_h_i_vec(self.centers, x_new)-get_h_i_vec(self.centers, x_old)
        dia = 1.0 / self.m * dia
        return dia.reshape((self.centers.shape[0], 1))
def training(self):
self.get_sigma()
self.get_lambda
if self.lambd is None:
self.lambd = 1.0
self.bootstrapping()
return
    def get_Hl_and_h(self):
        # Precompute h and the quadratic-form matrix Hl = 2A - A'HA with
        # A = (H + lambda*I)^-1; the incremental statistic is h'·Hl·h.
        H, self.h = self.get_H_and_h(np.asarray(self.window_train), np.asarray(self.window_slide))
        r = H.shape[0]
        aux = np.linalg.inv(H + np.eye(r) * self.lambd)
        self.Hl = aux*2 - aux.T.dot(H).dot(aux)
def get_sigma(self):
sum = 0
for xi in self.window_train:
for xj in self.window_train:
sum += self.get_distance(xi, xj)
self.sigma = sum / pow(self.n_t, 2)
    def get_H_and_h(self, X1, X2):
        # Build the LSDD system over the pooled centres of X1 and X2:
        # H[i][j] integrates the product of Gaussians at centres i and j,
        # h[i] is the mean kernel response difference between X1 and X2.
        r1, c1 = X1.shape
        r2, c2 = X2.shape
        if c1 != c2:
            raise ValueError("c1 != c2.")
        r = r1 + r2
        X = np.append(X1, X2, axis=0)
        H, h = [], []
        for i in range(r):
            get_H_i_j_vec = np.vectorize(self.get_H_i_j, signature='(n),(n)->()')
            H.append(get_H_i_j_vec(X, X[i]))
        H = np.asarray(H)
        for i in range(r):
            get_h_i_vec = np.vectorize(self.get_h_i, signature='(n),(n)->()')
            h.append(np.mean(get_h_i_vec(X1, X[i])) - np.mean(get_h_i_vec(X2, X[i])))
        h = np.asarray(h).reshape((r, 1))
        return H, h
    def get_H_i_j(self, ci, cj):
        # Closed-form Gaussian product integral between centres ci and cj.
        c = len(ci)
        tmp = math.pow(math.pi * pow(self.sigma, 2), c * 0.5) * \
            math.exp(-self.get_distance(ci, cj) / 4 / pow(self.sigma, 2))
        return tmp
def get_h_i(self, ci, cj):
return math.exp(-self.get_distance(ci, cj) / 2 / pow(self.sigma, 2))
def get_distance(self, instance_one, instance_two):
one = np.array(instance_one).flatten()
two = np.array(instance_two).flatten()
return np.sqrt(np.sum(np.power(np.subtract(one, two), [2 for _ in range(one.size)])))
    def get_lambda(self):
        # Select the ridge parameter lambda by bootstrap: scanning candidates
        # from largest to smallest, accept the first lambda whose average
        # relative deviation (RD) over `num` resampled window pairs falls
        # below RD0.  Leaves self.lambd unset if no candidate qualifies.
        num, RD0 = 20, 0.2
        _lambdas = np.flipud(np.logspace(-2, 1, 20))
        for _lambda in _lambdas:
            array = np.array(self.window_train)
            ave_RD = 0
            for i in range(num):
                # Resample a reference-sized and a slide-sized set from the
                # training window with replacement.
                index_arr = np.random.randint(0, self.n_t, size=self.n)
                data_sample1 = array[index_arr]
                index_arr = np.random.randint(0, self.n_t, size=self.m)
                data_sample2 = array[index_arr]
                ave_RD += self.get_RD(data_sample1, data_sample2, _lambda)
            ave_RD /= num
            if ave_RD < RD0:
                self.lambd = _lambda
                return
        return
    def get_RD(self, X1, X2, _lambda):
        # Relative deviation of the regularized LSDD solution for a given
        # lambda; a small epsilon guards the denominator against division by
        # zero.  Used by get_lambda() for model selection.
        H, h = self.get_H_and_h(X1, X2)
        r = H.shape[0]
        aux = np.linalg.inv(H + np.eye(r) * _lambda)
        RD = h.T.dot(aux.dot(aux)).dot(h)[0][0] / (h.T.dot(aux).dot(h)[0][0]+1e-10)
        return RD * _lambda
def bootstrappig(self):
array = np.array(self.window_train)
sample_result_arr = []
for i in range(self.bootstrap_num):
index_arr = np.random.randint(0, self.n_t, size=self.n)
data_sample1 = array[index_arr]
index_arr = np.random.randint(0, self.n_t, size=self.m)
data_sample2 = array[index_arr]
sample_result = self.get_d(data_sample1, data_sample2)
sample_result_arr.append(sample_result)
i = int(self.bootstrap_num * (1 - self.u))
auc_sample_arr_sorted = sorted(sample_result_arr)
t = auc_sample_arr_sorted[i]
eh0 = np.mean(np.asarray(sample_result_arr))
k = (1.0/self.n_t+1.0/self.m)/(1.0/self.n+1.0/self.m)-1
self.t1 = k * eh0 + t
    def get_d(self, X1, X2):
        # LSDD statistic d = 2*theta'h - theta'H theta with the regularized
        # solution theta = (H + lambda*I)^-1 h; larger d indicates the two
        # sample sets are more likely drawn from different distributions.
        H, h = self.get_H_and_h(X1, X2)
        r = H.shape[0]
        theta = np.linalg.inv(H + np.eye(r) * self.lambd).dot(h)
        d = theta.T.dot(h) * 2 - theta.T.dot(H).dot(theta)
        return d[0][0]<file_sep>from skmultiflow.drift_detection.ddm import DDM
class DDM(DDM):
    """
    Thin wrapper re-exporting skmultiflow's DDM (Drift Detection Method)
    with the library's default parameters.
    """
    def __init__(self, min_num_instances=30, warning_level=2.0, out_control_level=3.0):
        super().__init__(min_num_instances, warning_level, out_control_level)<file_sep>import numpy as math
import math
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class ECDD(BaseDriftDetector):
    """EWMA for Concept Drift Detection.

    Monitors the error-rate stream with an exponentially weighted moving
    average z_t and signals change when z_t exceeds a control limit built
    from the running error probability and its EWMA standard deviation.
    """
    def __init__(self, min_num_instances=30, warning_level=0.5, _lambda=0.2):
        super().__init__()
        self.sample_count = None   # number of elements seen so far
        self.miss_prob = None      # running mean of the error indicator
        self.miss_std = None       # EWMA standard deviation of the error rate
        self.miss_sum = None       # running sum of the error indicators
        self.z_t = None            # EWMA of the error indicator
        self.min_instances = min_num_instances
        self.warning_level = warning_level
        self._lambda = _lambda
        self.reset()
    def reset(self):
        # Restore all running statistics to their initial state.
        super().reset()
        self.sample_count = 1.0
        self.miss_prob = 0.0
        self.miss_std = 0.0
        self.miss_sum = 0.0
        self.z_t = 0.0
    def add_element(self, prediction):
        # Feed one prediction outcome (1 = error, 0 = correct).
        if self.in_concept_change:
            self.reset()
        self.miss_sum += prediction
        self.miss_prob = self.miss_sum/self.sample_count
        # EWMA standard deviation of the estimator after sample_count steps.
        self.miss_std = math.sqrt( self.miss_prob * (1.0 - self.miss_prob)* self._lambda * (1.0 - math.pow(1.0 - self._lambda, 2.0 * self.sample_count)) / (2.0 - self._lambda))
        self.sample_count += 1
        self.z_t += self._lambda * (prediction - self.z_t)
        # Polynomial control-limit multiplier in the error probability.
        L_t = 3.97 - 6.56 * self.miss_prob + 48.73 * math.pow(self.miss_prob, 3) - 330.13 * math.pow(self.miss_prob, 5) + 848.18 * math.pow(self.miss_prob, 7)
        self.estimation = self.miss_prob
        self.in_concept_change = False
        self.in_warning_zone = False
        self.delay = 0
        if self.sample_count < self.min_instances:
            return
        if self.z_t > self.miss_prob + L_t * self.miss_std:
            self.in_concept_change = True
        # warning_level < 1 places the warning bound below the drift bound.
        elif self.z_t > self.miss_prob + self.warning_level * L_t * self.miss_std:
            self.in_warning_zone = True
        else:
            self.in_warning_zone = False<file_sep>from skmultiflow.bayes import NaiveBayes
from skmultiflow.meta.dynamic_weighted_majority import DynamicWeightedMajority
class DynamicWeightedMajority(DynamicWeightedMajority):
    """
    Thin wrapper re-exporting skmultiflow's DynamicWeightedMajority
    ensemble with the library's default parameters.
    """
    def __init__(self, n_estimators=5, base_estimator=NaiveBayes(),
                 period=50, beta=0.5, theta=0.01):
        """
        Creates a new instance of DynamicWeightedMajority.
        """
        super().__init__(n_estimators, base_estimator,
                         period, beta, theta)<file_sep>from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
from skmultiflow.drift_detection import ADWIN
from skmultiflow.meta.adaptive_random_forests import AdaptiveRandomForest
class AdaptiveRandomForest(AdaptiveRandomForest):
    """
    Thin wrapper re-exporting skmultiflow's AdaptiveRandomForest with the
    library's default parameters.

    NOTE(review): the default ADWIN(...) detector instances are created once
    at function-definition time and therefore shared between constructions
    that rely on the defaults — this mirrors skmultiflow's own signature;
    confirm the base class clones them per ensemble member.
    """
    def __init__(self,
                 n_estimators=10,
                 max_features='auto',
                 disable_weighted_vote=False,
                 lambda_value=6,
                 performance_metric='acc',
                 drift_detection_method: BaseDriftDetector = ADWIN(0.001),
                 warning_detection_method: BaseDriftDetector = ADWIN(0.01),
                 max_byte_size=33554432,
                 memory_estimate_period=2000000,
                 grace_period=50,
                 split_criterion='info_gain',
                 split_confidence=0.01,
                 tie_threshold=0.05,
                 binary_split=False,
                 stop_mem_management=False,
                 remove_poor_atts=False,
                 no_preprune=False,
                 leaf_prediction='nba',
                 nb_threshold=0,
                 nominal_attributes=None,
                 random_state=None):
        """AdaptiveRandomForest class constructor."""
        super().__init__(n_estimators,
                         max_features,
                         disable_weighted_vote,
                         lambda_value,
                         performance_metric,
                         drift_detection_method,
                         warning_detection_method,
                         max_byte_size,
                         memory_estimate_period,
                         grace_period,
                         split_criterion,
                         split_confidence,
                         tie_threshold,
                         binary_split,
                         stop_mem_management,
                         remove_poor_atts,
                         no_preprune,
                         leaf_prediction,
                         nb_threshold,
                         nominal_attributes,
                         random_state)<file_sep>import numpy as math
import math
from scipy.stats import norm
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class STEPD(BaseDriftDetector):
    """Statistical Test of Equal Proportions drift detector
    (Nishida & Yamauchi, 2007).

    Compares the accuracy over the most recent `window_size` predictions
    against the accuracy over all older predictions with a two-proportion
    z-test; small two-sided p-values signal warning or drift.

    Bug fix: the p-value was computed with ``norm.ppf`` (the inverse CDF,
    which returns NaN for arguments > 1) instead of ``norm.cdf``, so the
    detector could never fire for large test statistics.
    """
    def __init__(self, window_size=30, alpha_warning=0.05, alpha_dirft=0.003):
        super().__init__()
        self.window_size = window_size
        self.alpha_warning = alpha_warning
        self.alpha_drift = alpha_dirft
        # Circular buffer holding the most recent `window_size` predictions.
        self.stored_pred = [0 for i in range(int(self.window_size))]
        self.first_pos = None       # index of the oldest recent prediction
        self.last_pos = None        # index of the newest recent prediction
        self.ro = None              # correct predictions, older window
        self.rr = None              # correct predictions, recent window
        self.wo = None              # wrong predictions, older window
        self.wr = None              # wrong predictions, recent window
        self.no = None              # size of the older window
        self.nr = None              # size of the recent window
        self.p = None               # pooled accuracy estimate
        self.Z = None               # test statistic, then two-sided p-value
        self.size_inve_sum = None   # 1/no + 1/nr
        self.reset()
    def reset(self):
        """Restore the detector to its initial state."""
        super().reset()
        self.first_pos = 0
        self.last_pos = -1
        self.wo = 0.0
        self.wr = 0.0
        self.no = 0
        self.nr = 0
    def add_element(self, prediction):
        """Feed one prediction outcome (1 = error, 0 = correct)."""
        if self.in_concept_change:
            self.reset()
        if self.nr == self.window_size:  # Recent window is full:
            self.wo += self.stored_pred[self.first_pos]  # oldest prediction in the recent
            self.no += 1                                 # window is moved to the older window.
            self.wr -= self.stored_pred[self.first_pos]
            self.first_pos += 1
            if self.first_pos == self.window_size:
                self.first_pos = 0
        else:  # Recent window still grows.
            self.nr += 1
        self.last_pos += 1  # Add prediction at the end of the recent window.
        if self.last_pos == self.window_size:
            self.last_pos = 0
        self.stored_pred[self.last_pos] = prediction
        self.wr += prediction
        self.in_warning_zone = False
        self.in_concept_change = False
        self.delay = 0
        if self.no >= self.window_size:  # The same as: (no + nr) >= 2 * window_size.
            self.ro = self.no - self.wo  # Correct predictions per window.
            self.rr = self.nr - self.wr
            self.size_inve_sum = 1.0 / self.no + 1.0 / self.nr
            self.p = (self.ro + self.rr) / (self.no + self.nr)
            # Two-proportion z-test with continuity correction.
            self.Z = abs(self.ro / self.no - self.rr / self.nr)
            self.Z = self.Z - self.size_inve_sum * 0.5
            self.Z = self.Z / (math.sqrt(self.p * (1.0 - self.p) * self.size_inve_sum) + 1e-18)
            # Two-sided p-value: 2 * (1 - Phi(|Z|)).  Previously used
            # norm.ppf here, which is the inverse CDF and yields NaN.
            self.Z = 2 * (1 - norm.cdf(abs(self.Z)))
            if self.Z < self.alpha_drift:
                self.in_concept_change = True
            elif self.Z < self.alpha_warning:
                self.in_warning_zone = True
            else:
                self.in_warning_zone = False
<file_sep>from .adwin import ADWIN
from .ddm import DDM
from .eddm import EDDM
from .ecdd import ECDD
from .page_hinkley import PageHinkley
from .stepd import STEPD
from .fw_ecdd import FWECDD
from .hddm_a import HDDM_A
from .hddm_w import HDDM_W
from .cusum_dm import CusumDM
from .gma_dm import GMADM
from .rddm import RDDM
from .dde import DDE
__all__ = ["ADWIN", "DDM", "ECDD", "EDDM", "PageHinkley", "STEPD", "FWECDD", "HDDM_A", "HDDM_W", "CusumDM", "GMADM", "RDDM", "DDE"]<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.online_smote_bagging import OnlineSMOTEBagging
class OnlineSMOTEBagging(OnlineSMOTEBagging):
    """
    Thin wrapper re-exporting skmultiflow's OnlineSMOTEBagging ensemble
    with the library's default parameters.
    """
    def __init__(self, base_estimator=KNNAdwin(), n_estimators=10, sampling_rate=1, drift_detection=True,
                 random_state=None):
        super().__init__(base_estimator, n_estimators, sampling_rate, drift_detection,
                         random_state)
<file_sep>from skmultiflow.drift_detection.page_hinkley import PageHinkley
class PageHinkley(PageHinkley):
    """
    Thin wrapper re-exporting skmultiflow's Page-Hinkley drift detector
    with the library's default parameters.
    """
    def __init__(self, min_instances=30, delta=0.005, threshold=50, alpha=1 - 0.0001):
        super().__init__(min_instances, delta, threshold, alpha)
<file_sep>from math import *
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class HDDM_W(BaseDriftDetector):
    """Drift detector based on McDiarmid/Hoeffding-style bounds over EWMA
    statistics of the error stream (HDDM_W variant).

    Maintains a global EWMA (`total`) plus cut-point monitors that compare
    an earlier EWMA snapshot against the EWMA of the samples that followed,
    signalling drift when their difference exceeds a confidence bound.
    """
    class SampleInfo:
        # EWMA estimator plus the accumulated squared-weight term used in
        # the confidence bound; EWMA_estimator == -1.0 marks "empty".
        def __init__(self):
            self.EWMA_estimator = -1.0
            self.independent_bounded_condition_sum = None
    def __init__(self, drift_confidence=0.001, warning_confidence=0.005, lambda_option=0.050, two_side_option=True):
        super().__init__()
        super().reset()
        self.total = self.SampleInfo()
        self.sample1_decr_monitor = self.SampleInfo()
        self.sample1_incr_monitor = self.SampleInfo()
        self.sample2_decr_monitor = self.SampleInfo()
        self.sample2_incr_monitor = self.SampleInfo()
        self.incr_cutpoint = float("inf")
        self.decr_cutpoint = float("inf")
        self.width = 0
        self.delay = 0
        self.drift_confidence = drift_confidence
        self.warning_confidence = warning_confidence
        self.lambda_option = lambda_option
        self.two_side_option = two_side_option
    def add_element(self, prediction):
        """ Add a new element to the statistics
        Parameters
        ----------
        prediction: int (either 0 or 1)
            This parameter indicates whether the last sample analyzed was
            correctly classified or not. 1 indicates an error (miss-classification).
        Notes
        -----
        After calling self method, to verify if change was detected or if
        the learner is in the warning zone, one should call the super method
        detected_change, which returns True if concept drift was detected and
        False otherwise.
        """
        aux_decay_rate = 1.0 - self.lambda_option
        self.width += 1
        if self.total.EWMA_estimator < 0:
            self.total.EWMA_estimator = prediction
            self.total.independent_bounded_condition_sum = 1
        else:
            self.total.EWMA_estimator = self.lambda_option * prediction + aux_decay_rate * self.total.EWMA_estimator
            self.total.independent_bounded_condition_sum = \
                self.lambda_option * self.lambda_option \
                + aux_decay_rate * aux_decay_rate * self.total.independent_bounded_condition_sum
        self._update_incr_statistics(prediction, self.drift_confidence)
        if self._monitor_mean_incr(self.drift_confidence):
            # Error rate increased significantly: drift.
            self.reset()
            self.in_concept_change = True
            self.in_warning_zone = False
            return
        elif self._monitor_mean_incr(self.warning_confidence):
            self.in_concept_change = False
            self.in_warning_zone = True
        else:
            self.in_concept_change = False
            self.in_warning_zone = False
        self._update_decr_statistics(prediction, self.drift_confidence)
        # A significant decrease only resets the statistics (no drift flag).
        if self.two_side_option and self._monitor_mean_decr(self.drift_confidence):
            self.reset()
        self.estimation = self.total.EWMA_estimator
    def _detect_mean_increment(self, sample1, sample2, confidence):
        # McDiarmid-style bound on the difference of the two EWMA estimators.
        if sample1.EWMA_estimator < 0 or sample2.EWMA_estimator < 0:
            return False
        bound = sqrt((sample1.independent_bounded_condition_sum
                      + sample2.independent_bounded_condition_sum) * log(1 / confidence) / 2)
        return sample2.EWMA_estimator - sample1.EWMA_estimator > bound
    def _monitor_mean_incr(self, confidence):
        return self._detect_mean_increment(self.sample1_incr_monitor, self.sample2_incr_monitor, confidence)
    def _monitor_mean_decr(self, confidence):
        return self._detect_mean_increment(self.sample2_decr_monitor, self.sample1_decr_monitor, confidence)
    def _update_incr_statistics(self, value, confidence):
        # Track the lowest upper confidence bound seen; samples after that
        # cut-point feed the second monitor.
        aux_decay = 1.0 - self.lambda_option
        epsilon = sqrt(self.total.independent_bounded_condition_sum * log(1.0 / confidence) / 2)
        if self.total.EWMA_estimator + epsilon < self.incr_cutpoint:
            self.incr_cutpoint = self.total.EWMA_estimator + epsilon
            self.sample1_incr_monitor.EWMA_estimator = self.total.EWMA_estimator
            self.sample1_incr_monitor.independent_bounded_condition_sum = self.total.independent_bounded_condition_sum
            self.sample2_incr_monitor = self.SampleInfo()
            self.delay = 0
        else:
            self.delay += 1
            if self.sample2_incr_monitor.EWMA_estimator < 0:
                self.sample2_incr_monitor.EWMA_estimator = value
                self.sample2_incr_monitor.independent_bounded_condition_sum = 1
            else:
                self.sample2_incr_monitor.EWMA_estimator = \
                    self.lambda_option * value + aux_decay * self.sample2_incr_monitor.EWMA_estimator
                self.sample2_incr_monitor.independent_bounded_condition_sum = \
                    self.lambda_option * self.lambda_option + \
                    aux_decay * aux_decay * self.sample2_incr_monitor.independent_bounded_condition_sum
    def _update_decr_statistics(self, value, confidence):
        # Mirror of _update_incr_statistics for decreasing error rates.
        aux_decay = 1.0 - self.lambda_option
        epsilon = sqrt(self.total.independent_bounded_condition_sum * log(1.0 / confidence) / 2)
        if self.total.EWMA_estimator - epsilon > self.decr_cutpoint:
            self.decr_cutpoint = self.total.EWMA_estimator - epsilon
            self.sample1_decr_monitor.EWMA_estimator = self.total.EWMA_estimator
            self.sample1_decr_monitor.independent_bounded_condition_sum = self.total.independent_bounded_condition_sum
            self.sample2_decr_monitor = self.SampleInfo()
        else:
            if self.sample2_decr_monitor.EWMA_estimator < 0:
                self.sample2_decr_monitor.EWMA_estimator = value
                self.sample2_decr_monitor.independent_bounded_condition_sum = 1
            else:
                self.sample2_decr_monitor.EWMA_estimator = \
                    self.lambda_option * value + aux_decay * self.sample2_decr_monitor.EWMA_estimator
                self.sample2_decr_monitor.independent_bounded_condition_sum = \
                    self.lambda_option * self.lambda_option \
                    + aux_decay * aux_decay * self.sample2_decr_monitor.independent_bounded_condition_sum
    def reset(self):
        """ reset
        Resets the change detector parameters.
        """
        super().reset()
        self.total = self.SampleInfo()
        self.sample1_decr_monitor = self.SampleInfo()
        self.sample1_incr_monitor = self.SampleInfo()
        self.sample2_decr_monitor = self.SampleInfo()
        self.sample2_incr_monitor = self.SampleInfo()
        self.incr_cutpoint = float("inf")
        self.decr_cutpoint = float("inf")
        self.width = 0
        self.delay = 0<file_sep>from skmultiflow.drift_detection.adwin import ADWIN
class ADWIN(ADWIN):
    """
    Thin wrapper re-exporting skmultiflow's ADWIN (ADaptive WINdowing)
    drift detector with the library's default delta.
    """
    def __init__(self, delta=.002):
        super().__init__(delta)
<file_sep>from skmultiflow.bayes import NaiveBayes
from skmultiflow.meta.accuracy_weighted_ensemble import AccuracyWeightedEnsemble
class AccuracyWeightedEnsemble(AccuracyWeightedEnsemble):
    """
    Thin wrapper re-exporting skmultiflow's AccuracyWeightedEnsemble with
    the library's default parameters.
    """
    def __init__(self, n_estimators=10, n_kept_estimators=30,
                 base_estimator=NaiveBayes(), window_size=200, n_splits=5):
        """ Create a new ensemble"""
        super().__init__(n_estimators, n_kept_estimators,
                         base_estimator, window_size, n_splits)
<file_sep>from math import *
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class RDDM(BaseDriftDetector):
    """Reactive Drift Detection Method (Barros et al., 2017).

    A DDM variant that keeps a circular buffer of recent predictions so the
    statistics can be recomputed from the warning point after a drift, and
    that forces drifts after long warnings or overly long stable concepts.
    """
    def __init__(self, min_num_instances=129, warning_level=1.773, drift_level=2.258, max_size_concept=40000, min_size_stable_concept=7000, warn_limit=1400):
        super().__init__()
        self.min_num_instances = min_num_instances
        self.warning_level = warning_level
        self.drift_level = drift_level
        self.max_size_concept = max_size_concept
        self.min_size_stable_concept = min_size_stable_concept
        self.warn_limit = warn_limit
        # Circular buffer of the latest min_size_stable_concept predictions.
        self.stored_predictions = [0 for i in range(int(self.min_size_stable_concept))]
        self.num_stored_instances = 0
        self.first_pos = 0
        self.last_pos = -1
        self.last_warn_pos = -1
        self.last_warn_inst = -1
        self.inst_num = 0
        self.rddm_drift = False
        self.in_concept_change = False
        self.miss_num = 1
        self.miss_prob = 1
        self.miss_std = 0
        self.reset()
        self.miss_prob_min = float("inf")
        self.miss_sd_min = float("inf")
        self.miss_prob_sd_min = float("inf")
    def reset(self):
        """ reset
        Resets the change detector parameters.
        """
        # super().reset()
        self.miss_num = 1
        self.miss_prob = 1
        self.miss_std = 0
        # Minimum statistics are only discarded after a confirmed drift.
        if self.in_concept_change:
            self.miss_prob_min = float("inf")
            self.miss_sd_min = float("inf")
            self.miss_prob_sd_min = float("inf")
    def add_element(self, prediction):
        # if self.in_concept_change:
        #     self.reset()
        if self.rddm_drift:
            # After a drift, rebuild the statistics from the stored
            # predictions starting at the last warning position.
            self.reset()
            if self.last_warn_pos != -1:
                self.first_pos = self.last_warn_pos
                self.num_stored_instances = self.last_pos - self.first_pos + 1
                if self.num_stored_instances <= 0:
                    self.num_stored_instances += self.min_size_stable_concept
            pos = self.first_pos
            for i in range(self.num_stored_instances):
                self.miss_prob = self.miss_prob + (self.stored_predictions[pos]-self.miss_prob) / self.miss_num
                self.miss_std = sqrt(self.miss_prob * (1 - self.miss_prob) / self.miss_num)
                if self.in_concept_change and self.miss_num > self.min_num_instances and self.miss_prob + self.miss_std < self.miss_prob_sd_min:
                    self.miss_prob_min = self.miss_prob
                    self.miss_sd_min = self.miss_std
                    self.miss_prob_sd_min = self.miss_prob + self.miss_std
                self.miss_num += 1
                pos = (pos + 1) % self.min_size_stable_concept
            self.last_warn_pos = -1
            self.last_warn_inst = -1
            self.rddm_drift = False
            self.in_concept_change = False
        # Store the new prediction in the circular buffer.
        self.last_pos = (self.last_pos + 1) % self.min_size_stable_concept
        self.stored_predictions[self.last_pos] = prediction
        if self.num_stored_instances < self.min_size_stable_concept:
            self.num_stored_instances += 1
        else:
            self.first_pos = (self.first_pos + 1) % self.min_size_stable_concept
            if self.last_warn_pos == self.last_pos:
                self.last_warn_pos = -1
        self.miss_prob = self.miss_prob + (prediction - self.miss_prob) / self.miss_num
        self.miss_std = sqrt(self.miss_prob * (1 - self.miss_prob) / self.miss_num)
        self.inst_num += 1
        self.miss_num += 1
        self.estimation = self.miss_prob
        self.in_warning_zone = False
        if self.miss_num <= self.min_num_instances:
            return
        if self.miss_prob + self.miss_std < self.miss_prob_sd_min:
            self.miss_prob_min = self.miss_prob
            self.miss_sd_min = self.miss_std
            self.miss_prob_sd_min = self.miss_prob + self.miss_std
        if self.miss_prob + self.miss_std > self.miss_prob_min + self.drift_level * self.miss_sd_min:
            self.in_concept_change = True
            self.rddm_drift = True
            if self.last_warn_inst == -1:
                self.first_pos = self.last_pos  # DDM Drift without previous warning
                self.min_num_instances = 1
            return
        if self.miss_prob + self.miss_std > self.miss_prob_min + self.warning_level * self.miss_sd_min:
            # Warning level for warn_limit consecutive instances will force drifts
            if self.last_warn_inst != -1 and self.last_warn_inst + self.warn_limit <= self.inst_num:
                self.in_concept_change = True
                self.rddm_drift = True
                self.first_pos = self.last_pos
                self.num_stored_instances = 1
                self.last_warn_pos = -1
                self.last_warn_inst = -1
                return
            # Warning Zone
            self.in_warning_zone = True
            if self.last_warn_inst == -1:
                self.last_warn_inst = self.inst_num
                self.last_warn_pos = self.last_pos
        else:
            self.last_warn_inst = -1
            self.last_warn_pos = -1
        # Force a drift if the current concept grew beyond max_size_concept.
        if self.miss_num > self.max_size_concept and not self.in_warning_zone:
            self.rddm_drift = True<file_sep>import numpy as np
import math
import random
from .base_distribution_detector import BaseDistributionDetector
class RD(BaseDistributionDetector):
    """Relativized-Discrepancy change detector for one-dimensional streams.

    Compares a fixed reference window against a sliding window with a
    KS-like interval statistic ("rd1" or "rd2" normalization); the alarm
    threshold alpha is calibrated once by Monte-Carlo sampling of uniform
    data under the no-change hypothesis.
    """
    def __init__(self, window_size=20, n=400, p=0.99, type="rd1", sample_size=500):
        super().__init__()
        self.window_size = window_size
        self.window_ref = [None for _ in range(self.window_size)]   # reference window
        self.window_sli = [None for _ in range(self.window_size)]   # sliding window
        self.n = n
        self.p = p
        if type != "rd1" and type != "rd2":
            raise ValueError("RD should be rd1 or rd2")
        self.type = type
        self.sample_size = sample_size
        self.win_ref_i = None
        self.win_sli_i = None
        self.i = None
        self.alpha = None   # calibrated detection threshold
        self.rd = None      # last computed statistic
        self.sample()
        self.reset()
    def sample(self):
        # Calibrate alpha as the p-quantile of the statistic on sample_size
        # pairs of same-distribution uniform samples.
        samples = []
        size = int(self.n / 2)
        for i in range(self.sample_size):
            X1 = np.random.uniform(0, 1, size)
            X2 = np.random.uniform(0, 1, size)
            samples.append(self.get_rd(X1.tolist(), X2.tolist()))
        samples = sorted(samples)
        self.alpha = samples[int(self.p * self.sample_size)]
        return
    def reset(self):
        super().reset()
        self.win_ref_i = 0
        self.win_sli_i = 0
        self.i = 0
        self.rd = -1
    def add_element(self, input_value):
        if self.in_concept_change:
            self.reset()
        # Fill the reference window first.
        if self.win_ref_i < self.window_size:
            self.window_ref[self.win_ref_i] = input_value
            self.win_ref_i = self.win_ref_i + 1
            return
        self.window_sli[self.win_sli_i] = input_value
        self.win_sli_i = (self.win_sli_i + 1) % self.window_size
        self.i += 1
        # Wait until the sliding window has been filled once.
        if self.i < self.window_size:
            return
        self.rd = self.get_rd(self.window_ref, self.window_sli)
        if self.rd > self.alpha:
            self.in_concept_change = True
    def get_rd(self, X1, X2):
        # Relativized discrepancy over all intervals [l, r] of the merged,
        # sorted sample: signed weights +1/|X1| and -1/|X2| are prefix-summed
        # so pre[r]-pre[l-1] is the empirical measure difference on [l, r].
        X = X1 + X2
        for i in range(len(X)):
            if i < len(X1):
                X[i] = [X[i], 1 / len(X1)]
            else:
                X[i] = [X[i], -1 / len(X2)]
        X = sorted(X, key=(lambda x: x[0]))
        pre = [0 for _ in range(len(X) + 1)]
        for i in range(1, len(X) + 1):
            pre[i] = pre[i - 1] + X[i-1][1]
        rd = 0
        for l in range(1, len(X) + 1):
            for r in range(l, len(X) + 1):
                num = abs(pre[r] - pre[l - 1])
                aux = (r - l + 1) / len(X)
                # rd1: sqrt(min(u, 1-u)) normalization; rd2: sqrt(u*(1-u)).
                dem = math.sqrt(min(aux, 1 - aux))
                if self.type == "rd2":
                    dem = math.sqrt(aux * (1 - aux))
                if dem == 0:
                    continue
                rd = max(rd, num / dem)
        return rd
<file_sep>import numpy as np
import math
import time
from .kdqtree import KDQTree
from .base_distribution_detector import BaseDistributionDetector
class ITA(BaseDistributionDetector):
    """Information-Theoretic Approach change detector (Dasu et al.):
    partitions the reference window with a kdq-tree and monitors the KL
    divergence of the current window's leaf histogram, with a bootstrap
    threshold and a persistence count before signalling change.

    Bug fixes: ``get_kl_distance`` overwrote the accumulator instead of
    summing leaf contributions (``=`` instead of ``+=``), and the timing
    print never applied its format placeholder.
    """
    def __init__(self, window_size=200, side=pow(2, -10), leaf_size=100, persistence_factor=0.05, asl=0.01,
                 bootstrap_num=500):
        super().__init__()
        self.window_size = window_size
        self.side = side                            # minimum kdq-tree cell side
        self.leaf_size = leaf_size
        self.persistence_factor = persistence_factor
        self.asl = asl                              # achieved significance level
        self.bootstrap_num = bootstrap_num
        self.threshold = window_size * persistence_factor
        self.window_his = None                      # reference window
        self.index = None
        self.kdqtree = None
        self.leafs = None                           # current-window leaf counts
        self.values = None                          # per-leaf KL contributions
        self.kl_distance = None
        self.higher = None                          # bootstrap threshold
        self.count = None                           # persistence counter
        self.number_sum = None
        self.reset()
    def reset(self):
        super().reset()
        self.window_his = []
        self.window_queue = [0 for _ in range(self.window_size)]
        self.index = 0
        self.number_sum = 0
        self.count = 0
        self.kdqtree = None
        self.leafs = None
        self.kl_distance = None
    def add_element(self, input_value):
        if self.in_concept_change:
            self.reset()
        if input_value.ndim == 0:
            input_value = np.asarray([input_value])
        if len(self.window_his) < self.window_size:
            # Still collecting the reference window.
            self.window_his.append(input_value)
            if len(self.window_his) == self.window_size:
                self.get_new_kdqtree()
                time1 = time.time()
                self.higher = self.bootstrap(data=np.asarray(self.window_his), B=self.bootstrap_num, c=self.asl,
                                             func=self.get_kl_distance)
                time2 = time.time()
                print("Bootstrap time: {}".format(time2 - time1))
        else:
            self.number_sum += 1
            if self.number_sum > self.window_size:
                # Sliding phase: update the two affected leaf counts and
                # their KL contributions incrementally.
                changed_leaf1 = self.window_queue[self.index]
                self.leafs[changed_leaf1] -= 1
                self.window_queue[self.index] = self.kdqtree.query(np.asarray([input_value]))[0][0]
                changed_leaf2 = self.window_queue[self.index]
                self.leafs[changed_leaf2] += 1
                self.kl_distance -= self.values[changed_leaf1]
                pv = self.kdqtree.nodes_per_leaf[changed_leaf1]
                qv = self.leafs[changed_leaf1]
                self.values[changed_leaf1] = (pv + 0.5) * math.log((pv + 0.5) / (qv + 0.5))
                self.kl_distance += self.values[changed_leaf1]
                self.kl_distance -= self.values[changed_leaf2]
                pv = self.kdqtree.nodes_per_leaf[changed_leaf2]
                qv = self.leafs[changed_leaf2]
                self.values[changed_leaf2] = (pv + 0.5) * math.log((pv + 0.5) / (qv + 0.5))
                self.kl_distance += self.values[changed_leaf2]
                self.index = (self.index + 1) % self.window_size
            else:
                # Filling the current window for the first time.
                self.window_queue[self.index] = self.kdqtree.query(np.asarray([input_value]))[0][0]
                self.leafs[self.window_queue[self.index]] += 1
                self.index = (self.index + 1) % self.window_size
                if self.number_sum == self.window_size:
                    self.kl_distance = 0
                    for i in range(len(self.leafs)):
                        pv = self.kdqtree.nodes_per_leaf[i]
                        qv = self.leafs[i]
                        self.values[i] = (pv + 0.5) * math.log((pv + 0.5) / (qv + 0.5))
                        self.kl_distance += self.values[i]
        if self.kl_distance is None:
            return
        # print(self.kl_distance)
        if self.kl_distance > self.higher:
            self.count += 1
            if self.count > self.threshold:
                print("changed")
                self.in_concept_change = True
        else:
            self.count = 0
    def get_new_kdqtree(self):
        """Rebuild the kdq-tree over the reference window."""
        self.kdqtree = KDQTree(X=np.asarray(self.window_his), leaf_size=self.leaf_size, min_side=self.side)
        self.leafs = [0 for _ in range(len(self.kdqtree.nodes_per_leaf))]
        self.values = [0 for _ in range(len(self.kdqtree.nodes_per_leaf))]
    def get_kl_distance(self, X1, X2):
        """KL divergence between X1's kdq-tree leaf histogram and X2's
        counts, with 0.5 smoothing per leaf."""
        kdqtree = KDQTree(X1, leaf_size=self.leaf_size, min_side=self.side)
        leafs = [0 for _ in range(len(kdqtree.nodes_per_leaf))]
        leaf_id_all = kdqtree.query(X2)
        for id in leaf_id_all:
            leafs[id[0]] += 1
        kl_distance = 0.0
        for i in range(len(leafs)):
            pv = kdqtree.nodes_per_leaf[i]
            qv = leafs[i]
            # Accumulate every leaf's contribution (previously `=`, which
            # discarded all but the last leaf).
            kl_distance += (pv + 0.5) * math.log((pv + 0.5) / (qv + 0.5))
        return kl_distance
    def bootstrap(self, data, B, c, func):
        """Return the (1 - c) quantile of `func` over B resampled pairs."""
        array = np.array(data)
        n = len(array)
        sample_result_arr = []
        for i in range(B):
            index_arr = np.random.randint(0, n, size=n)
            data_sample1 = array[index_arr]
            index_arr = np.random.randint(0, n, size=n)
            data_sample2 = array[index_arr]
            sample_result = func(data_sample1, data_sample2)
            sample_result_arr.append(sample_result)
        k2 = int(B * (1 - c))
        auc_sample_arr_sorted = sorted(sample_result_arr)
        higher = auc_sample_arr_sorted[k2]
        return higher
<file_sep>from skmultiflow.core import BaseSKMObject, ClassifierMixin, MetaEstimatorMixin
from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.online_adac2 import OnlineAdaC2
class OnlineAdaC2(OnlineAdaC2):
    """
    Thin wrapper around skmultiflow's OnlineAdaC2 ensemble.

    Bug fix: the class previously inherited from (BaseSKMObject,
    ClassifierMixin, MetaEstimatorMixin) while still forwarding the ensemble
    parameters to super().__init__(), which raised a TypeError at
    construction; it now subclasses the actual skmultiflow implementation,
    matching every sibling wrapper in this package.
    """
    def __init__(self,
                 base_estimator=KNNAdwin(),
                 n_estimators=10,
                 cost_positive=1,
                 cost_negative=0.1,
                 drift_detection=True,
                 random_state=None):
        super().__init__(base_estimator,
                         n_estimators,
                         cost_positive,
                         cost_negative,
                         drift_detection,
                         random_state)
<file_sep>import matplotlib.pyplot as plt
import numpy as np
def draw_concise_diagram(x):
    # Plot a 1-D numpy array as a thin blue line: 1% margins on the x axis,
    # 10% margins on the y axis, with y limits computed over finite values
    # only.  Blocks until the matplotlib window is closed (plt.show()).
    _, ax = plt.subplots(1, 1, figsize=(8, 6))
    t = range(x.size)
    ax.plot(t, x, "b-", lw=0.5)
    ax.set_xlim(-.01 * x.size, x.size * 1.01 - 1)
    ax.set_xlabel("index", fontsize=14)
    ax.set_ylabel("value", fontsize=14)
    ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
    # Guard against a zero range when the series is constant.
    yrange = ymax - ymin if ymax > ymin else 1
    ax.set_ylim(ymin - 0.1 * yrange, ymax + 0.1 * yrange)
    plt.tight_layout()
    plt.show()<file_sep>import numpy as np
import math
import time
from scipy.stats import norm
from .base_distribution_detector import BaseDistributionDetector
class LSDDCDT(BaseDistributionDetector):
    """Least-Squares Density-Difference Change Detection Test.

    After a training phase that calibrates the kernel width, ridge parameter
    and three bootstrap thresholds (stable t_s / warning t_w / change t_c),
    it compares a reservoir-sampled reference window against a sliding
    window via the LSDD statistic d.
    """
    def __init__(self, train_size=400, window_size=200, u_s=0.02, u_w=0.01, u_c=0.001, bootstrap_num=2000):
        super().__init__()
        if window_size * 2 > train_size:
            raise ValueError("window_size * 2 > train_size.")
        self.n = window_size
        self.m = bootstrap_num
        self.n_t = train_size
        self.u_s = u_s          # quantile level for the "stable" threshold
        self.u_w = u_w          # quantile level for the warning threshold
        self.u_c = u_c          # quantile level for the change threshold
        self.p_w = None
        self.p_c = None
        self.t_s = None
        self.t_w = None
        self.t_c = None
        self.sigma = None       # Gaussian kernel width
        self.lambd = None       # ridge regularization parameter
        self.window_reference = [None for _ in range(self.n)]
        self.window_slide = [None for _ in range(self.n)]
        self.window_train = [None for _ in range(self.n_t)]
        self.win_ref_i = None
        self.win_sli_i = None
        self.win_tra_i = None
        self.i = None
        self.warning_num = None
        self.reset()
    def reset(self):
        super().reset()
        self.t_s = None
        self.t_w = None
        self.t_c = None
        self.sigma = None
        self.lambd = None
        self.win_ref_i = 0
        self.win_sli_i = 0
        self.win_tra_i = 0
        self.i = 0
        self.warning_num = 0
    def add_element(self, input_value):
        if self.in_concept_change:
            self.reset()
        input_value = np.asarray(input_value)
        if input_value.ndim != 1:
            raise ValueError("X should has one dimension")
            # return
        # Phase 1: fill the training window, then calibrate.
        if self.win_tra_i < self.n_t:
            self.window_train[self.win_tra_i] = input_value
            self.win_tra_i += 1
            if self.win_tra_i == self.n_t:
                self.training()
            return
        # Phase 2: fill the reference window.
        if self.win_ref_i < self.n:
            self.window_reference[self.win_ref_i] = input_value
            self.win_ref_i += 1
            return
        self.i += 1
        # Phase 3: fill the sliding window once before monitoring.
        if self.i < self.n:
            self.window_slide[self.win_sli_i] = input_value
            self.win_sli_i = (self.win_sli_i + 1) % self.n
            return
        # slide window_slide
        self.window_slide[self.win_sli_i] = input_value
        self.win_sli_i = (self.win_sli_i + 1) % self.n
        # calculate d^2
        d = self.get_d(np.asarray(self.window_reference), np.asarray(self.window_slide))
        print(str(self.i) + ": " + str(d))
        if d > self.t_w or self.in_warning_zone:
            self.in_warning_zone = True
            self.warning_num += 1
            if d > self.t_c:
                self.in_concept_change = True
            if d < self.t_s or self.warning_num >= self.n:
                # False alarm: leave the warning zone and resume sampling.
                self.in_warning_zone = False
                self.warning_num = 0
                self.reservoir_sampling(input_value)
        else:
            self.reservoir_sampling(input_value)
            self.warning_num = 0
    def training(self):
        # Calibrate sigma, lambda and the three bootstrap thresholds.
        self.get_sigma()
        self.get_lambda()
        if self.lambd is None:
            self.lambd = 1.0
        self.bootstrapping()
        print("Ts: " + str(self.t_s))
        print("Tw: " + str(self.t_w))
        print("Tc: " + str(self.t_c))
        return
    def reservoir_sampling(self, input_value):
        # Keep the reference window a uniform sample of the stable stream.
        # NOTE(review): the classic reservoir update accepts r < n and
        # replaces slot r; here slot n-1 is never replaced — confirm the
        # `self.n - 1` bound is intended.
        r = np.random.randint(0, self.n + self.i + 1)
        if r < self.n - 1:
            self.window_reference[r] = input_value
        return
    def get_d(self, X1, X2):
        # LSDD statistic d = 2*theta'h - theta'H theta with
        # theta = (H + lambda*I)^-1 h.
        H, h = self.get_H_and_h(X1, X2)
        r = H.shape[0]
        theta = np.linalg.inv(H + np.eye(r) * self.lambd).dot(h)
        d = theta.T.dot(h) * 2 - theta.T.dot(H).dot(theta)
        return d[0][0]
    def get_H_and_h(self, X1, X2):
        # Assemble the LSDD linear system over the stacked centers [X1; X2].
        r1, c1 = X1.shape
        r2, c2 = X2.shape
        if c1 != c2:
            raise ValueError("c1 != c2.")
        r = r1 + r2
        X = np.append(X1, X2, axis=0)
        H, h = [], []
        for i in range(r):
            get_H_i_j_vec = np.vectorize(self.get_H_i_j, signature='(n),(n)->()')
            H.append(get_H_i_j_vec(X, X[i]))
        H = np.asarray(H)
        for i in range(r):
            # get_distance_vec = np.vectorize(self.get_distance, signature='(n),(n)->()')
            # h.append(np.mean(get_distance_vec(X1, X[i])) - np.mean(get_distance_vec(X2, X[i])))
            get_h_i_vec = np.vectorize(self.get_h_i, signature='(n),(n)->()')
            h.append(np.mean(get_h_i_vec(X1, X[i])) - np.mean(get_h_i_vec(X2, X[i])))
        h = np.asarray(h).reshape((r, 1))
        return H, h
    def get_H_i_j(self, ci, cj):
        c = len(ci)  # NOTE(review): translated from Chinese — "this part may be problematic"
        tmp = math.pow(math.pi * pow(self.sigma, 2), c * 0.5) * \
              math.exp(-self.get_distance(ci, cj) / 4 / pow(self.sigma, 2))
        return tmp
    def get_h_i(self, ci, cj):
        # Gaussian kernel value between ci and cj with width sigma.
        return math.exp(-self.get_distance(ci, cj) / 2 / pow(self.sigma, 2))
    def get_distance(self, instance_one, instance_two):
        # Euclidean distance between flattened instances.
        one = np.array(instance_one).flatten()
        two = np.array(instance_two).flatten()
        return np.sqrt(np.sum(np.power(np.subtract(one, two), [2 for _ in range(one.size)])))
    def get_sigma(self):
        # Kernel width = mean pairwise distance over the training window.
        sum = 0
        for xi in self.window_train:
            for xj in self.window_train:
                sum += self.get_distance(xi, xj)
        self.sigma = sum / pow(self.n_t, 2)
    def bootstrapping(self):
        # Thresholds t_s / t_w / t_c are quantiles of the statistic over
        # resampled same-distribution window pairs.
        array = np.array(self.window_train)
        sample_result_arr = []
        for i in range(self.m):
            index_arr = np.random.randint(0, self.n_t, size=self.n)
            data_sample1 = array[index_arr]
            index_arr = np.random.randint(0, self.n_t, size=self.n)
            data_sample2 = array[index_arr]
            sample_result = self.get_d(data_sample1, data_sample2)
            sample_result_arr.append(sample_result)
        k_s = int(self.m * (1 - self.u_s))
        k_w = int(self.m * (1 - self.u_w))
        k_c = int(self.m * (1 - self.u_c))
        auc_sample_arr_sorted = sorted(sample_result_arr)
        self.t_s = auc_sample_arr_sorted[k_s]
        self.t_w = auc_sample_arr_sorted[k_w]
        self.t_c = auc_sample_arr_sorted[k_c]
    def get_lambda(self):
        # Accept the first (largest) lambda whose average relative deviation
        # drops below RD0; otherwise leave self.lambd as None.
        num, RD0 = 20, 0.25
        _lambdas = np.flipud(np.logspace(-2, 1, 20))
        for _lambda in _lambdas:
            array = np.array(self.window_train)
            ave_RD = 0
            for i in range(num):
                index_arr = np.random.randint(0, self.n_t, size=self.n)
                data_sample1 = array[index_arr]
                index_arr = np.random.randint(0, self.n_t, size=self.n)
                data_sample2 = array[index_arr]
                ave_RD += self.get_RD(data_sample1, data_sample2, _lambda)
            ave_RD /= num
            if ave_RD < RD0:
                self.lambd = _lambda
                return
        return
    def get_RD(self, X1, X2, _lambda):
        # Relative deviation of the regularized solution for lambda tuning.
        H, h = self.get_H_and_h(X1, X2)
        r = H.shape[0]
        aux = np.linalg.inv(H + np.eye(r) * _lambda)
        RD = h.T.dot(aux.dot(aux)).dot(h)[0][0] / (h.T.dot(aux).dot(h)[0][0]+1e-10)
        return RD * _lambda
<file_sep>import numpy as np
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class GMADM(BaseDriftDetector):
    """Geometric Moving Average drift detector.

    Tracks the running error rate and a geometrically discounted sum of the
    deviations from it; drift is signalled once that sum exceeds `_lambda`.
    """
    def __init__(self, min_num_instances=30, alpha=0.99, _lambda=1):
        super().__init__()
        self.sample_count = None   # 1-based number of samples processed
        self.miss_prob = None      # running mean of the error indicator
        self.miss_sum = None       # geometric moving average statistic
        self.min_instances = min_num_instances
        self.alpha = alpha
        self._lambda = _lambda
        self.reset()
    def reset(self):
        """Restore the detector to its initial state."""
        super().reset()
        self.sample_count = 1
        self.miss_prob = 0.0
        self.miss_sum = 0.0
    def add_element(self, prediction):
        """Feed one prediction outcome (1 = error, 0 = correct).

        Afterwards, query `detected_change()` on the base class to learn
        whether drift was signalled.
        """
        if self.in_concept_change:
            self.reset()
        self.miss_prob = self.miss_prob + (prediction - self.miss_prob) / float(self.sample_count)
        self.miss_sum = self.alpha * self.miss_sum + (1.0 - self.alpha) * (prediction - self.miss_prob)
        self.sample_count += 1
        self.estimation = self.miss_prob
        self.in_concept_change = False
        self.in_warning_zone = False
        self.delay = 0
        # Suppress alarms until enough samples have been observed.
        if self.sample_count < self.min_instances:
            return
        self.in_concept_change = self.miss_sum > self._lambda
<file_sep>from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
from libcdd.error_rate_based import *
class DDE(BaseDriftDetector):
    """Drift Detection Ensemble.

    Wraps several base detectors (named in a comma-separated string and
    resolved from ``libcdd.error_rate_based`` via ``globals()``) and raises
    warning/drift once at least ``min_drift_weight`` members agree.
    """
    def __init__(self, outlier=100, detectors="HDDM_A,HDDM_W,DDM", min_drift_weight=1):
        super().__init__()
        self.outlier = None
        # result[i] == 0 while detector i has not fired; otherwise it holds
        # the instance number at which detector i signalled drift.
        self.result = []
        self.warning_level = None
        self.drift_level = None
        self.inst_number = None
        self.index = None
        self.min_drift_weight = None
        self.change_detector_pool = []
        # self.ddmstring = []
        self.value_list = detectors
        self.ensemble()
        self.result = [0 for i in range(len(self.change_detector_pool))]
        self.min_drift_weight = min_drift_weight
        self.outlier = outlier
        self.reset()
    def reset(self):
        """ reset
        Resets the change detector parameters.
        """
        super().reset()
        # Rebuild all member detectors from scratch and clear their records.
        self.ensemble()
        self.inst_number = 0
        for i in range(len(self.result)):
            self.result[i] = 0
    def add_element(self, prediction):
        """Feed one prediction outcome to the member detectors and combine
        their warning/drift votes into the ensemble decision."""
        if self.in_concept_change:
            self.reset()
        self.inst_number += 1
        self.drift_level = 0
        self.warning_level = 0
        for i in range(len(self.change_detector_pool)):
            if self.result[i] < 1: # not in drift
                self.change_detector_pool[i].add_element(prediction)
                if self.change_detector_pool[i].in_concept_change:
                    self.result[i] = self.inst_number
                    self.drift_level += 1
                else:
                    if self.change_detector_pool[i].in_warning_zone:
                        self.warning_level += 1
            else: # in drift
                # A member's drift vote expires after `outlier` instances.
                if self.result[i] + self.outlier < self.inst_number:
                    self.result[i] = 0
                else:
                    self.drift_level += 1
            # Short-circuit once enough members have voted drift.
            if (self.drift_level >= self.min_drift_weight):
                break
        if self.warning_level + self.drift_level < self.min_drift_weight:
            self.in_warning_zone = False
        else:
            if self.drift_level < self.min_drift_weight:
                self.in_warning_zone = True
            else:
                # self.reset()
                self.in_concept_change = True
    def ensemble(self):
        """Instantiate the member detectors named in ``self.value_list``."""
        if self.value_list != "":
            self.split = self.value_list.split(",")
            self.change_detector_pool = []
            # self.ddmstring = []
            if len(self.split) > 0:
                for i in range(len(self.split)):
                    # if self.split[i].index("(") > -1:
                    # self.split[i] = self.split[i].substring(self.split[i].index("(")+1)
                    # self.split[i] = self.split[i].substring(0, self.split[i].index(")"))
                    self.change_detector_pool.append(globals()[self.split[i]]())
                    # self.ddmstring.append(self.split[i])
            else:
                self.change_detector_pool.append(globals()[self.value_list]())
                # self.ddmstring.append(self.value_list)
<file_sep>from . import data_distribution_based
from . import ensemble
from . import error_rate_based
__all__ = ['data_distribution_based', 'ensemble', 'error_rate_based']<file_sep>from skmultiflow.core import BaseSKMObject
from skmultiflow.utils.utils import *
class KDQTree(BaseSKMObject):
    """kdq-tree over a fixed data matrix.

    Recursively bisects the bounding box of ``X``, cycling through feature
    axes; a cell becomes a leaf once it holds fewer than ``leaf_size``
    points or its relative side length falls below ``min_side``.
    ``nodes_per_leaf[id]`` counts the build points in leaf ``id``;
    ``query`` maps new points to leaf ids.
    """
    def __init__(self, X, categorical_list=None, leaf_size=100, min_side=pow(2, -10)):
        super().__init__()
        self.X = np.asarray(X)
        if self.X.ndim != 2: # expects shape (n_samples, n_features)
            raise ValueError("X should be a matrix, or array-like, of shape (n_samples, n_features).")
        self.X = self.X.astype(np.float64)
        self.n_samples, self.n_features = self.X.shape
        self.categorical_list = categorical_list
        self.leaf_size = leaf_size
        self.min_side = min_side
        # Per-leaf counts of build samples, indexed by leaf id.
        self.nodes_per_leaf = []
        self.root = None
        # Current cell bounds; temporarily narrowed while recursing.
        self.maxes = np.amax(self.X, axis=0)
        self.mins = np.amin(self.X, axis=0)
        # Relative side length of the current cell per axis (1.0 = full range).
        self.cur = [1.0 for _ in range(self.n_features)]
        self.aux_query_X = None
        indexes = [i for i in range(self.n_samples)]
        self.root = self.__build(0, indexes)
    def __build(self, col, indexes):
        """Build the subtree for the points in ``indexes``, splitting on axis ``col``.

        Mutates ``self.maxes``/``self.mins``/``self.cur`` around each
        recursive call and restores them afterwards, so the bounds always
        describe the cell currently under construction.
        """
        root = KDQTreeNode()
        root.split_axis = col
        maxval = self.maxes[col]
        minval = self.mins[col]
        size = len(indexes)
        # Stop when the cell is too small or too sparsely populated.
        if self.cur[col] < self.min_side or size < self.leaf_size:
            root.is_leaf = True
            root.id = len(self.nodes_per_leaf)
            self.nodes_per_leaf.append(size)
            return root
        midval = (maxval + minval) / 2
        root.split_value = midval
        root.split_axis = col
        left_indexes = []
        right_indexes = []
        for row in indexes:
            if self.X[row, col] > midval:
                right_indexes.append(row)
            else:
                left_indexes.append(row)
        # Create left son
        self.cur[col] /= 2
        self.maxes[col] = midval
        root.left_son = self.__build((col+1)%self.n_features, left_indexes)
        self.maxes[col] = maxval
        # Create right son
        self.mins[col] = midval
        root.right_son = self.__build((col+1)%self.n_features, right_indexes)
        self.mins[col] = minval
        self.cur[col] *= 2
        return root
    def query(self, X):
        """Return, for each row of ``X``, a one-element list with its leaf id."""
        r, c = get_dimensions(X)
        leaf_id_all = []
        for i in range(r):
            leaf_ids = []
            self.aux_query_X = X[i].astype(np.float64)
            leaf_ids.append(self._query(self.root))
            leaf_id_all.append(leaf_ids)
        return leaf_id_all
    def _query(self, root):
        """Descend from ``root`` following split values; return the leaf id."""
        if root.is_leaf:
            return root.id
        if self.aux_query_X[root.split_axis]<root.split_value:
            return self._query(root.left_son)
        else:
            return self._query(root.right_son)
class KDQTreeNode(object):
    """Single node of a kdq-tree; fields are filled in during construction."""
    def __init__(self):
        self.left_son = None
        self.right_son = None
        self.split_axis = None
        self.split_value = None
        # Leaves carry an id indexing into KDQTree.nodes_per_leaf.
        self.is_leaf = False
        self.id = None
<file_sep>import copy as cp
import numpy as np
from skmultiflow.core import BaseSKMObject, ClassifierMixin, MetaEstimatorMixin
from skmultiflow.bayes import NaiveBayes
from skmultiflow.utils.utils import *
class PairedLearners(BaseSKMObject, ClassifierMixin, MetaEstimatorMixin):
    """Paired Learners drift-adaptive classifier.

    Keeps a long-lived *stable* learner and a *reactive* learner retrained
    on the last ``window_size`` instances.  When the reactive learner is
    correct on more than ``theta`` of the windowed instances where the
    stable learner errs, the stable learner is replaced by the reactive one.
    """
    def __init__(self, stable_estimator=NaiveBayes(), reactive_estimator=NaiveBayes(), window_size=12, threshold=0.2):
        super().__init__()
        # default values
        self.c = None                    # 0/1 flags: reactive right where stable wrong
        self.stable_base_estimator = stable_estimator
        self.reactive_base_estimator = reactive_estimator
        self.stable_estimator = None
        self.reactive_estimator = None
        self.t = None                    # circular cursor into the window
        self.classes = None
        self.w = window_size
        self.theta = math.floor(self.w * threshold)  # error-count trigger
        self.instances_X = None
        self.instances_y = None
        self.change_detected = None      # number of learner swaps so far
        self.number_of_errors = None     # sum of self.c over the window
        self.__configure()
    def __configure(self):
        """(Re)initialize window buffers, counters and both learners."""
        self.classes = None
        self.change_detected = 0
        self.number_of_errors = 0
        self.t = 0
        self.c = [0 for i in range(self.w)]
        self.instances_X = [None for _ in range(self.w)]
        self.instances_y = [None for _ in range(self.w)]
        self.stable_estimator = cp.deepcopy(self.stable_base_estimator)
        self.reactive_estimator = cp.deepcopy(self.reactive_base_estimator)
    def reset(self):
        """Reset the model to its untrained initial state."""
        self.__configure()
        return self
    def partial_fit(self, X, y, classes=None, sample_weight=None):
        """Incrementally fit on a batch.

        ``classes`` must be supplied on the first call and may not change
        afterwards; ``sample_weight`` is accepted but unused.
        """
        if classes is None and self.classes is None:
            raise ValueError("The first partial_fit call should pass all the classes.")
        if classes is not None and self.classes is None:
            self.classes = classes
        elif classes is not None and self.classes is not None:
            if set(self.classes) == set(classes):
                pass
            else:
                raise ValueError(
                    "The classes passed to the partial_fit function differ from those passed in an earlier moment.")
        r, c = get_dimensions(X)
        for i in range(r):
            self.__partial_fit(np.asarray([X[i]]), np.asarray([y[i]]))
        return self
    def __partial_fit(self, X, y):
        """Process one instance: update the window, maybe swap learners,
        then train the stable learner and rebuild the reactive one."""
        self.instances_X[self.t] = X
        self.instances_y[self.t] = y
        self.stable_prediction = self.stable_estimator.predict(X)[0] == y[0]
        self.reactive_prediction = self.reactive_estimator.predict(X)[0] == y[0]
        # Slide the window: drop the flag being overwritten at cursor t.
        self.number_of_errors -= self.c[self.t]
        if not self.stable_prediction and self.reactive_prediction:
            self.c[self.t] = 1
            self.number_of_errors += 1
        else:
            self.c[self.t] = 0
        # Too many stable-only errors -> promote the reactive learner.
        if self.theta < self.number_of_errors:
            self.change_detected += 1
            self.stable_estimator = cp.deepcopy(self.reactive_estimator)
            for i in range(self.w):
                self.c[i] = 0
            self.number_of_errors = 0
        self.stable_estimator.partial_fit(X, y)
        # Reactive learner is rebuilt from scratch on the current window.
        self.reactive_estimator = cp.deepcopy(self.reactive_base_estimator)
        for i in range(self.w):
            if self.instances_X[i] is None:
                break
            self.reactive_estimator.partial_fit(self.instances_X[i], self.instances_y[i])
        self.t += 1
        if self.t == self.w:
            self.t = 0
    def predict(self, X):
        """Predict class labels as the argmax of ``predict_proba``."""
        r, c = get_dimensions(X)
        proba = self.predict_proba(X)
        predictions = []
        if proba is None:
            return None
        for i in range(r):
            predictions.append(np.argmax(proba[i]))
        return np.asarray(predictions)
    def predict_proba(self, X):
        """Normalized class probabilities from the reactive learner; returns
        zeros on estimator errors (ValueError/TypeError)."""
        proba = []
        r, c = get_dimensions(X)
        try:
            partial_proba = self.reactive_estimator.predict_proba(X)
            if len(partial_proba[0]) > max(self.classes) + 1:
                raise ValueError("The number of classes in the base learner is larger than in the ensemble.")
            if len(proba) < 1:
                for n in range(r):
                    proba.append([0.0 for _ in partial_proba[n]])
            for n in range(r):
                for l in range(len(partial_proba[n])):
                    try:
                        proba[n][l] += partial_proba[n][l]
                    except IndexError:
                        proba[n].append(partial_proba[n][l])
        except ValueError:
            return np.zeros((r, 1))
        except TypeError:
            return np.zeros((r, 1))
        # normalizing probabilities
        sum_proba = []
        for l in range(r):
            sum_proba.append(np.sum(proba[l]))
        aux = []
        for i in range(len(proba)):
            if sum_proba[i] > 0.:
                aux.append([x / sum_proba[i] for x in proba[i]])
            else:
                aux.append(proba[i])
        return np.asarray(aux)
<file_sep>from .draw_concise_diagram import draw_concise_diagram
__all__ = ["draw_concise_diagram"]<file_sep>import numpy as np
from libcdd.error_rate_based import *
# Create a detector
detector = HDDM_A()
# Generate 2000 samples: the first and last 1000 follow different normal
# distributions, thresholded at 0 to produce a 0/1 stream.
np.random.seed(1)
mu, std = 0, 0.1 # mean and standard deviation
data1 = np.random.normal(mu, std, 1000) > 0
data1 = data1.astype(int)
mu, std = 0.5, 0.1
data2 = np.random.normal(mu, std, 1000) > 0
data2 = data2.astype(int)
data_stream = np.concatenate((data1, data2))
# Detection loop
# NOTE(review): detected_indices is never appended to — dead variable.
detected_indices = []
for i in range(data_stream.size):
    detector.add_element(data_stream[i])
    if detector.detected_change():
        print(str(i)+"时刻发生了概念漂移")
#程序的运行结果为:1049时刻发生了概念漂移<file_sep>import numpy as np
import time
from libcdd.data_distribution_based import RD
from libcdd.draw import draw_concise_diagram
# Build a stream of `concept_num` uniform concepts; each concept's range is
# shifted by `dia`, creating an abrupt distribution change every `size` points.
size = 80
concept_num = 10
l = 0
r = 1
dia = 100
data = []
for i in range(concept_num):
    data += np.random.uniform(l, r, size).tolist()
    l += dia
    r += dia
# Time the detector's construction.
t1 = time.time()
rd = RD(window_size=20, n=20)
t2 = time.time()
print(t2 - t1)
# Feed the stream, recording the RD statistic and the drift positions.
indexes = []
rds = []
for i in range(len(data)):
    rd.add_element(data[i])
    rds.append(rd.rd)
    if rd.in_concept_change:
        indexes.append(i)
print(rds)
draw_concise_diagram(np.asarray(rds))
print(indexes)
<file_sep>import numpy as np
from libcdd.error_rate_based import *
# Demo setup: GMADM on a synthetic 0/1 error stream with an abrupt change.
dm = GMADM()
data_stream = np.random.randint(2, size=2000)
# Force indices 999-1499 to 0 so the error rate shifts mid-stream.
for i in range(999, 1500):
    data_stream[i] = 0
for i in range(2000):
dm.add_element(data_stream[i])
# if dm.detected_warning_zone():
# print('Warning zone has been detected in data: ' + str(data_stream[i]) + ' - of index: ' + str(i))
if dm.detected_change():
print('Change has been detected in data: ' + str(data_stream[i]) + ' - of index: ' + str(i))<file_sep>from skmultiflow.lazy import KNNAdwin
from skmultiflow.meta.online_under_over_bagging import OnlineUnderOverBagging
class OnlineUnderOverBagging(OnlineUnderOverBagging):
    """
    Thin wrapper re-exporting skmultiflow's OnlineUnderOverBagging with
    explicit defaults; subclasses the imported class of the same name.
    """
    def __init__(self, base_estimator=KNNAdwin(), n_estimators=10, sampling_rate=2, drift_detection=True,
                 random_state=None):
        super().__init__(base_estimator, n_estimators, sampling_rate, drift_detection,
                         random_state)
<file_sep>from skmultiflow.meta.classifier_chains import *
class ClassifierChain(ClassifierChain):
    """Thin re-export of skmultiflow's ClassifierChain with its defaults;
    subclasses the imported class of the same name."""
    def __init__(self, base_estimator=LogisticRegression(), order=None, random_state=None):
        super().__init__(base_estimator, order, random_state)
class ProbabilisticClassifierChain(ProbabilisticClassifierChain):
    """Thin re-export of skmultiflow's ProbabilisticClassifierChain with its
    defaults; subclasses the imported class of the same name."""
    def __init__(self, base_estimator=LogisticRegression(), order=None, random_state=None):
        super().__init__(base_estimator, order, random_state)
class MonteCarloClassifierChain(MonteCarloClassifierChain):
    """Thin re-export of skmultiflow's MonteCarloClassifierChain with its
    defaults; subclasses the imported class of the same name."""
    def __init__(self, base_estimator=LogisticRegression(), M=10, random_state=None):
        super().__init__(base_estimator, M, random_state)
<file_sep>from .ita import ITA
from .lsdd_cdt import LSDDCDT
from .lsdd_inc import LSDDINC
from .ldd_dsda import LDDDSDA
from .rd import RD
from .ede import EDE
__all__ = ["ITA", "LSDDCDT", "LSDDINC", "LDDDSDA", "RD", "EDE"]<file_sep>import numpy as np
from .base_distribution_detector import BaseDistributionDetector
class EDE(BaseDistributionDetector):
    """Skeleton of a two-window, distribution-based drift detector.

    The first ``window_size`` observations fill a reference window; later
    observations fill a circular sliding window.  Once both are full, the
    statistic ``get_w`` (a stub returning 0 here) is compared against the
    threshold ``self.t``.  ``sampling`` and ``get_w`` are placeholders for
    subclasses.
    """
    def __init__(self, window_size=100, alpha=0.05, sample_size=500):
        super().__init__()
        self.window_size = window_size
        self.window_ref = [None for _ in range(self.window_size)]   # reference window
        self.window_sli = [None for _ in range(self.window_size)]   # sliding window
        self.sample_size = sample_size
        self.alpha = alpha
        self.win_ref_i = None
        self.win_sli_i = None
        self.i = None
        # Detection threshold; never assigned by this skeleton — it stays
        # None until a subclass or caller sets it.
        self.t = None
        self.w = None
        self.reset()
    def sampling(self):
        # Placeholder; subclasses are expected to implement sampling.
        return
    def reset(self):
        """Reset the window cursors and the last computed statistic."""
        super().reset()
        self.win_ref_i = 0
        self.win_sli_i = 0
        self.i = 0
        self.w = -1
    def add_element(self, input_value):
        """Add one 1-D observation; flags drift when the window statistic
        exceeds the threshold ``self.t`` (skipped while ``t`` is unset)."""
        if self.in_concept_change:
            self.reset()
        input_value = np.asarray(input_value)
        if input_value.ndim != 1:
            raise ValueError("X should has one dimension")
        # Phase 1: fill the reference window.
        if self.win_ref_i < self.window_size:
            self.window_ref[self.win_ref_i] = input_value
            self.win_ref_i = self.win_ref_i + 1
            return
        # Phase 2: circular sliding window over recent observations.
        self.window_sli[self.win_sli_i] = input_value
        self.win_sli_i = (self.win_sli_i + 1) % self.window_size
        self.i += 1
        if self.i < self.window_size:
            return
        self.w = self.get_w(self.window_ref, self.window_sli)
        # Fix: self.t is never initialized to a number, so the original
        # unguarded `self.w > self.t` raised TypeError (int vs None) in
        # Python 3 as soon as both windows were full.
        if self.t is not None and self.w > self.t:
            self.in_concept_change = True
    def get_w(self, X1, X2):
        # Placeholder statistic; subclasses should override.
        return 0
<file_sep>from sklearn.tree import DecisionTreeClassifier
from skmultiflow.meta.learn_nse import LearnNSE
class LearnNSE(LearnNSE):
    """
    Thin wrapper re-exporting skmultiflow's LearnNSE with explicit default
    hyperparameters; subclasses the imported class of the same name.
    """
    def __init__(self,
                 base_estimator=DecisionTreeClassifier(),
                 window_size=250,
                 slope=0.5,
                 crossing_point=10,
                 n_estimators=15,
                 pruning=None):
        super().__init__(base_estimator,
                         window_size,
                         slope,
                         crossing_point,
                         n_estimators,
                         pruning)
<file_sep>LIBCDD
==========
##### A Library of Concept Drift Detection Method

## Introduction
**LIBCDD** is a python-based library of Concept Drift Detection Method, which includes:
+ Drift Detection Method (DDM)[10]
+ Early Drift Detection Method (EDDM)[11]
+ EWMA for Concept Drift Detection (ECDD)[12]
+ Fuzzy Windowing Drift Detection Method (FW-DDM)[13]
+ Hoeffding’s inequality based Drift Detection Method A Test (HDDM_A)[14]
+ Hoeffding’s inequality based Drift Detection Method W Test (HDDM_W)[14]
+ Reactive Drift Detection Method (RDDM)[15]
+ Page-Hinkley[16]
+ CUSUM[16]
+ Statistical Test of Equal Proportions Detection (STEPD)[17]
+ ADaptive WINdowing (ADWIN)[18]
+ Drift Detection Ensemble (DDE)[19]
+ Relativized Discrepancy (RD)[2]
+ Information-Theoretic Approach (ITA)[22]
+ Least Squares Density Difference-based Change Detection Test (LSDD-CDT)[23]
+ LSDD-INC[24]
+ Local Drift Degree-based Density Synchronized Drift Adaptation(LDD-DSDA)[25]
Our goal is to facilitate the use of the popular concept drift detection methods. **LIBCDD** provides a simple python interface where users can easily apply an appropriate concept drift detection method to their data. The taxonomy of these methods and characteristics of the concept drift detection methods are listed as follows:


## Requirement
+ scipy==1.0.0
+ numpy==1.13.3+mkl
+ matplotlib==2.1.1
+ scikit_learn==0.19.1
+ scikit_multiflow==0.4.1
## Download LIBCDD
The current release (Version 1.0, May 2020) of **LIBCDD** can be obtained by directly cloning this repository.
## Quick Start
Below is the example code to call a concept drift detection method.
```python
import numpy as np
from libcdd.error_rate_based import *
#新建一个检测器
detector = HDDM_A()
#获取2000个数据,其中前后1000个数据分别服从不同的正态分布
#以0为决策边界
np.random.seed(1)
mu, std = 0, 0.1 # 均值和标准差
data1 = np.random.normal(mu, std, 1000) > 0
data1 = data1.astype(int)
mu, std = 0.5, 0.1
data2 = np.random.normal(mu, std, 1000) > 0
data2 = data2.astype(int)
data_stream = np.concatenate((data1, data2))
#检测过程
detected_indices = []
for i in range(data_stream.size):
detector.add_element(data_stream[i])
if detector.detected_change():
print(str(i)+"时刻发生了概念漂移")
#程序的运行结果为:1049时刻发生了概念漂移
```
## References
[1]<NAME>, <NAME>. Learning in the presence of concept drift and hidden contexts[J]. Machine learning, 1996, 23(1): 69-101.
[2]<NAME>, <NAME>, <NAME>. Detecting change in data streams[A]. In: Proceedings of the VLDB[C], 2004, pp. 180-191.
[3]<NAME>, <NAME>, <NAME>. Mining time-changing data streams[A]. In: Proceedings of the seventh ACM SIGKDD international conference on Knowledge discovery and data mining[C], 2001, pp. 97-106.
[4]<NAME>, <NAME>. Incremental learning from noisy data[J]. Machine learning, 1986, 1(3): 317-354.
[5]<NAME>, <NAME>, <NAME>, et al. Learning under concept drift: A review[J]. IEEE Transactions on Knowledge Data Engineering, 2018, 31(12): 2346-2363.
[6]<NAME>, <NAME>, <NAME>, et al. A survey on concept drift adaptation[J]. ACM computing surveys, 2014, 46(4): 1-37.
[7]<NAME>, <NAME>, Detection of abrupt changes: theory and application (1st Edition)[M], prentice Hall Englewood Cliffs, 1993.
[8]<NAME>, <NAME>, <NAME>, et al. Moa: Massive online analysis[J]. Journal of Machine Learning Research, 2010, 11(May): 1601-1604.
[9]<NAME>, <NAME>, <NAME>, et al. Scikit-multiflow: A multi-output streaming framework[J]. 2018, 19(1): 2915-2914.
[10]<NAME>, <NAME>, <NAME>, et al. Learning with drift detection[A]. In: Proceedings of the Brazilian symposium on artificial intelligence[C], 2004, pp. 286-295.
[11]<NAME>, <NAME>, <NAME>, et al. Early drift detection method[A]. In: Proceedings of the Fourth international workshop on knowledge discovery from data streams[C], 2006, pp. 77-86.
[12]<NAME>, <NAME>, D.<NAME>, et al. Exponentially weighted moving average charts for detecting concept drift[J]. Pattern recognition letters, 2012, 33(2): 191-198.
[13]<NAME>, <NAME>, <NAME>. Fuzzy time windowing for gradual concept drift adaptation[A]. In: Proceedings of the 2017 IEEE International Conference on Fuzzy Systems (FUZZ-IEEE)[C], 2017, pp. 1-6.
[14]<NAME>, <NAME>, <NAME>, et al. Online and non-parametric drift detection methods based on Hoeffding’s bounds[J]. IEEE Transactions on Knowledge Data Engineering, 2014, 27(3): 810-823.
[15]<NAME>, <NAME>, <NAME>, et al. RDDM: Reactive drift detection method[J]. Expert Systems with Applications, 2017, 90: 344-355.
[16]E.S. Page. Continuous inspection schemes[J]. Biometrika, 1954, 41(1/2): 100-115.
[17]<NAME>, <NAME>. Detecting concept drift using statistical testing[A]. In: Proceedings of the International conference on discovery science[C], 2007, pp. 264-269.
[18]<NAME>, <NAME>. Learning from time-changing data with adaptive windowing[A]. In: Proceedings of the 2007 SIAM international conference on data mining[C], 2007, pp. 443-448.
[19]<NAME>, <NAME>, <NAME>. A lightweight concept drift detection ensemble[A]. In: Proceedings of the 2015 IEEE 27th International Conference on Tools with Artificial Intelligence (ICTAI)[C], 2015, pp. 1061-1068.
[20]<NAME>, <NAME>. A guided tour of Chernoff bounds[J]. Information processing letters, 1990, 33(6): 305-308.
[21]<NAME>. An overview of statistical learning theory[J]. IEEE transactions on neural networks, 1999, 10(5): 988-999.
[22]<NAME>, <NAME>, <NAME>, et al. An information-theoretic approach to detecting changes in multi-dimensional data streams[A]. In: Proceedings of the In Proc. Symp. on the Interface of Statistics, Computing Science, and Applications[C], 2006, pp. 1-24.
[23]<NAME>, <NAME>, <NAME>. A pdf-free change detection test based on density difference estimation[J]. IEEE transactions on neural networks learning systems, 2016, 29(2): 324-334.
[24]<NAME>, <NAME>, <NAME>. An incremental change detection test based on density difference estimation[J]. IEEE Transactions on Systems, Man, Cybernetics: Systems, 2017, 47(10): 2714-2726.
[25]<NAME>, <NAME>, <NAME>, et al. Regional concept drift detection and density synchronized drift adaptation[A]. In: Proceedings of the IJCAI International Joint Conference on Artificial Intelligence[C], 2017, pp. 2280–2286.
<file_sep>import os
import numpy as np
from libcdd.data_distribution_based import *
def test_ita(test_path):
    """
    ITA drift detection test.
    The first half of the stream contains a sequence corresponding to a normal distribution of integers from 0 to 1.
    From index 999 to 1999 the sequence is a normal distribution of integers from 0 to 7.
    """
    # NOTE(review): the test is named and documented for ITA but instantiates
    # RD() — confirm which detector is actually under test.
    ita = RD()
    test_file = os.path.join(test_path, 'drift_stream.npy')
    data_stream = np.load(test_file)
    # Indices at which the detector is expected to flag a change.
    expected_indices = [1023, 1055, 1087, 1151]
    detected_indices = []
    for i in range(data_stream.size):
        # print(data_stream[i])
        ita.add_element(data_stream[i])
        if ita.detected_change():
            detected_indices.append(i)
    print(detected_indices)
    assert detected_indices == expected_indices
    # NOTE(review): RD.get_info() is unlikely to report "ita(...)"; this
    # assertion probably fails — verify against the intended detector.
    expected_info = "ita(delta=0.002)"
    assert ita.get_info() == expected_info
test_ita('/Users/yang/Documents/ConceptDriftDetectionLib/test/data_distribution_based/')<file_sep>import numpy as np
import math
import scipy.stats as stats
from .base_distribution_detector import BaseDistributionDetector
from skmultiflow.bayes import NaiveBayes
from skmultiflow.utils.utils import *
from sklearn.neighbors import KDTree
class LDDDSDA(BaseDistributionDetector):
    """Local Drift Degree-based Density Synchronized Drift Adaptation.

    Maintains a training batch and a buffer batch.  Once the buffer fills,
    ``ldd_dis`` compares the local (k-NN) densities of the two batches,
    flags drift when density-increased/decreased regions exist, and builds
    a density-synchronized training set for a fresh learner.
    """
    def __init__(self, batch_size=100, train_size=100, rho=0.1, alpha=0.05, base_learner=NaiveBayes()):
        super().__init__()
        self.w = batch_size      # size of the train/buffer batches
        self.l = base_learner    # current classifier
        self.n = train_size     # number of instances used for the initial fit
        self.alpha = alpha      # significance level for the LDD thresholds
        self.rho = rho          # neighbourhood fraction used for k-NN
        self.trained = False
        self.d_train_X, self.d_train_y = [], []
        self.d_buffer_X, self.d_buffer_y = [], []
        self.reset()
    def reset(self):
        super().reset()
    def add_element(self, X, y):
        """Consume one labelled instance.

        Phase 1 accumulates the first ``train_size`` instances and fits the
        learner.  Afterwards instances fill the buffer; when the buffer
        holds ``batch_size`` instances, the training set is density-
        synchronized against it and the learner is refit.
        """
        if self.in_concept_change:
            self.reset()
        X, y = np.asarray(X), np.asarray(y)
        # if X.ndim != 1 or y.ndim != 1:
        #     raise ValueError("input_value should has one dimension")
        if (not self.trained) and len(self.d_train_X) < self.n:
            self.d_train_X.append(X)
            self.d_train_y.append(y)
            if len(self.d_train_X) == self.n:
                self.l.partial_fit(np.asarray(self.d_train_X), np.asarray(self.d_train_y))
                self.trained = True
            return
        if len(self.d_train_X) < self.w:
            self.d_train_X.append(X)
            self.d_train_y.append(y)
            return
        self.d_buffer_X.append(X)
        self.d_buffer_y.append(y)
        if len(self.d_buffer_X) < self.w:
            return
        self.d_train_X, self.d_train_y = self.ldd_dis(np.asarray(self.d_train_X),
                                                      np.asarray(self.d_train_y),
                                                      np.asarray(self.d_buffer_X),
                                                      np.asarray(self.d_buffer_y))
        # NOTE(review): a fresh NaiveBayes is created here regardless of the
        # ``base_learner`` passed to __init__ — confirm this is intended.
        self.l = NaiveBayes()
        self.l.fit(self.d_train_X, self.d_train_y)
        self.d_train_X = self.d_train_X.tolist()
        self.d_train_y = self.d_train_y.tolist()
        self.d_buffer_X = []
        self.d_buffer_y = []
        return
    def predict(self, X):
        """Predict with the current learner."""
        return self.l.predict(X)
    def ldd_dis(self, d1_X, d1_y, d2_X, d2_y):
        """Split both batches into density decreased/stable/increased regions
        and return a synchronized training sample of roughly ``self.w`` rows.

        Sets ``self.in_concept_change`` when a drifted region is found.
        """
        d = np.append(d1_X, d2_X, axis=0)
        d_y = np.append(d1_y, d2_y, axis=0)
        d1_dec, d1_sta, d1_inc = [], [], []
        d2_dec, d2_sta, d2_inc = [], [], []
        kdtree = KDTree(d)
        d_knn = []
        for i in range(d.shape[0]):
            d_knn.append(set(kdtree.query(X=d[i:i+1],
                                          k=int(d.shape[0] * self.rho),
                                          return_distance=False)[0]))
        # Estimate the spread of the drift degree under a random split.
        indexes = np.arange(d.shape[0])
        np.random.shuffle(indexes)
        _d1 = set(indexes[:d1_X.shape[0]])
        _d2 = set(indexes[d1_X.shape[0]:])
        deltas = []
        for i in range(d.shape[0]):
            # NOTE(review): x1/x2 can be 0, raising ZeroDivisionError —
            # confirm rho/batch sizes always give non-empty neighbourhoods.
            x1 = len(d_knn[indexes[i]] & _d1)
            x2 = len(d_knn[indexes[i]] & _d2)
            if i < d1_X.shape[0]:
                deltas.append(x2 / x1 - 1)
            else:
                deltas.append(x1 / x2 - 1)
        delta_std = np.std(deltas, ddof=1)
        theta_dec = stats.norm.ppf(1 - self.alpha, 0, delta_std)
        theta_inc = stats.norm.ppf(self.alpha, 0, delta_std)
        # Classify each point of the true split by its drift degree.
        # NOTE(review): theta_dec is the upper quantile yet gates the "dec"
        # branch — looks inverted; kept as-is, confirm against the paper.
        _d1 = set(np.arange(d1_X.shape[0]))
        _d2 = set(np.arange(d1_X.shape[0], d.shape[0]))
        for i in range(d.shape[0]):
            x1 = len(d_knn[i] & _d1)
            x2 = len(d_knn[i] & _d2)
            if i < d1_X.shape[0]:
                delta = x2 / x1 - 1
                if delta < theta_dec:
                    d1_dec.append(i)
                elif delta > theta_inc:
                    d1_inc.append(i)
                else:
                    d1_sta.append(i)
            else:
                delta = x1 / x2 - 1
                if delta < theta_dec:
                    d2_dec.append(i)
                elif delta > theta_inc:
                    d2_inc.append(i)
                else:
                    d2_sta.append(i)
        if len(d1_dec) == 0 and len(d2_inc) == 0:
            return d1_X, d1_y
        self.in_concept_change = True
        # Borrow matching-region points from batch 1 in proportion k.
        aux = []
        if len(d2_dec) != 0:
            aux.append(len(d1_inc) / len(d2_dec))
        if len(d2_sta) != 0:
            aux.append(len(d1_sta) / len(d2_sta))
        if len(d2_inc) != 0:
            aux.append(len(d1_dec) / len(d2_inc))
        k = min(aux)
        d2_dec += d1_inc[:int(k * len(d2_dec))]
        d2_sta += d1_sta[:int(k * len(d2_sta))]
        d2_inc += d1_dec[:int(k * len(d2_inc))]
        # Downsample each region to keep roughly self.w instances in total.
        aux_indexes = d2_inc + d2_sta + d2_dec
        r = self.w / len(aux_indexes)
        d2_dec = d2_dec[:int(len(d2_dec)*r)]
        d2_sta = d2_sta[:int(len(d2_sta)*r)]
        # Fix: truncate the merged d2_inc itself — the original sliced
        # d1_inc here, discarding the merge above and mixing regions.
        d2_inc = d2_inc[:int(len(d2_inc)*r)]
        aux_indexes = d2_inc + d2_sta + d2_dec
        return d[aux_indexes], d_y[aux_indexes]
<file_sep>from sklearn.tree import DecisionTreeClassifier
from skmultiflow.meta.learn_pp import LearnPP
class LearnPP(LearnPP):
    """
    Thin wrapper re-exporting skmultiflow's LearnPP with explicit default
    hyperparameters; subclasses the imported class of the same name.
    """
    def __init__(self, base_estimator=DecisionTreeClassifier(),
                 error_threshold=0.5,
                 n_estimators=30,
                 n_ensembles=10,
                 window_size=100,
                 random_state=None):
        super().__init__(base_estimator,
                         error_threshold,
                         n_estimators,
                         n_ensembles,
                         window_size,
                         random_state)
<file_sep>import numpy as math
import math
from skmultiflow.drift_detection.base_drift_detector import BaseDriftDetector
class FWECDD(BaseDriftDetector):
    """EWMA drift detector with a fuzzy (linearly weighted) window.

    The oldest ``fw_rate`` fraction of the prediction history gets linearly
    rising weight while the newest tail gets full weight; the EWMA ``z_t``
    is compared against control limits around the windowed error rate.
    """
    def __init__(self, min_num_instances=30, min_fw_size = 15, warning_level=2.0, _lambda=0.2, fw_rate=0.8):
        super().__init__()
        self.sample_count = None     # samples seen (offset by 1)
        self.miss_sum = None         # running sum of 0/1 outcomes
        self.miss_prob = None        # overall error rate
        self.fw_miss_num = None      # effective (weighted) window size
        self.fw_miss_prob = None     # fuzzy-weighted windowed error rate
        # self.f_m_s = None
        # self.f_z_t = None
        self.min_instances = min_num_instances
        self.warning_level = warning_level
        self.min_fw_size = min_fw_size
        self.fw_rate = fw_rate
        self.pre_hist = []           # full 0/1 outcome history since reset
        self._lambda = _lambda       # EWMA smoothing factor
        self.z_t = None              # EWMA of the outcomes
        self.reset()
    def reset(self):
        """Clear all running statistics and the prediction history."""
        super().reset()
        self.sample_count = 1
        self.miss_sum = 0
        self.miss_prob = 0
        # self.m_s = 0
        self.z_t = 0
        self.pre_hist.clear()
    def add_element(self, prediction):
        """Feed one 0/1 outcome (1 = error) and update drift/warning flags.

        NOTE(review): the outcome is appended before the post-drift reset,
        so the first sample after a detected drift is discarded by
        ``pre_hist.clear()`` — confirm this is intended.
        """
        self.pre_hist.append(prediction)
        if self.in_concept_change:
            self.reset()
        self.miss_sum += prediction
        self.miss_prob = self.miss_sum / self.sample_count
        # self.m_s = math.sqrt(self.miss_prob * (1.0 - self.miss_prob) * self._lambda * (1.0 - math.pow(1.0 - self._lambda, 2.0 * self.sample_count)) / (2.0 - self._lambda))
        self.sample_count += 1
        self.fw_miss_prob = 0
        self.fw_miss_num = 1
        # Entries with index <= tmp (the oldest fw_rate share) are weighted
        # i/tmp; newer entries count with full weight 1.
        tmp = len(self.pre_hist)*self.fw_rate
        for i in range(len(self.pre_hist)):
            if tmp >= self.min_fw_size and i <= tmp:
                self.fw_miss_prob += self.pre_hist[i]*i/tmp
                self.fw_miss_num += i / tmp
                # self.f_z_t += self._lambda*(self.pre_hist[i]*i/tmp-self.f_z_t)
            else:
                self.fw_miss_prob += self.pre_hist[i]
                self.fw_miss_num += 1
                # self.f_z_t += self._lambda*(self.pre_hist[i]-self.f_z_t)
        self.fw_miss_prob /= self.fw_miss_num
        # Standard error of the windowed error rate.
        self.f_m_s = math.sqrt(self.fw_miss_prob*(1-self.fw_miss_prob)/self.fw_miss_num)
        # EWMA update of the outcome stream.
        self.z_t += self._lambda * (prediction - self.z_t)
        # Polynomial control-limit width L(p), as used in ECDD-style charts.
        L_t = 3.97 - 6.56 * self.fw_miss_prob + 48.73 * math.pow(self.fw_miss_prob, 3) - 330.13 * math.pow(self.fw_miss_prob, 5) + 848.18 * math.pow(self.fw_miss_prob, 7)
        self.estimation = self.miss_prob
        self.in_concept_change = False
        self.in_warning_zone = False
        self.delay = 0
        if self.sample_count < self.min_instances:
            return
        # NOTE(review): with warning_level=2.0 the warning limit sits ABOVE
        # the drift limit, so the elif branch is unreachable — confirm the
        # intended multiplier (warning limits are usually < 1 * L_t).
        if self.z_t > self.fw_miss_prob + L_t * self.f_m_s:
            self.in_concept_change = True
        elif self.z_t > self.fw_miss_prob + self.warning_level * L_t * self.f_m_s:
            self.in_warning_zone = True
        else:
            self.in_warning_zone = False
|
c84d705b4dfff9e2221338b92e8cd5aa9c397c48
|
[
"Markdown",
"Python"
] | 47 |
Python
|
HsiangYangChu/LIBCDD
|
6ff2cf032ac1aef48ac028043f342d68dc80516b
|
ae3f4a228eba35c1619da9bbc9ff6c3a00e5f271
|
refs/heads/master
|
<repo_name>hutom-io/COCOcustom<file_sep>/make_custom_json/coco_json.py
from submask import *
from submaskann import *
import json
import os
# Build a COCO-style annotation file from color-coded mask images.
data_path = './images/'
# Sort the directory listing so image ids are deterministic across runs
# (os.listdir order is arbitrary).
file_list = sorted(os.listdir(data_path))
n_data = len(file_list)
mask_images = []
for image in range(n_data):
    mask_images.append(Image.open(data_path+file_list[image]))
# Define which colors match which categories in the images
body_id, upper_id, lower_id = [1, 2, 3]
category_ids = {}
for image in range(n_data):
    category_ids[image+1] = {
        '(255, 255, 0)': body_id,
        '(255, 0, 0)': upper_id,
        '(0, 0, 255)': lower_id
    }
is_crowd = 0
# These ids will be automatically increased as we go
annotation_id = 1
image_id = 1
# Create the annotations
cocoanns = {}
annotations = []
images = []
for img in range(n_data):
    # Fix: image ids must be 1-based to match the annotations below, which
    # reference image_id starting at 1 (the original wrote 0-based ids,
    # leaving every annotation pointing at a nonexistent image).
    # NOTE(review): width/height are assumed fixed at 1920x1080 — confirm
    # against the actual mask dimensions (mask_images[img].size).
    images.append({'file_name': file_list[img], 'width': 1920, 'height': 1080, 'id': img + 1})
for mask_image in mask_images:
    sub_masks = create_sub_masks(mask_image)
    for color, sub_mask in sub_masks.items():
        category_id = category_ids[image_id][color]
        annotation = create_sub_mask_annotation(sub_mask, image_id, category_id, annotation_id, is_crowd)
        annotations.append(annotation)
        annotation_id += 1
    image_id += 1
cocoanns['annotations'] = annotations
cocoanns['categories'] = [{'supercategory': 'bipolar', 'id': 1, 'name': 'body'},
                          {'supercategory': 'bipolar', 'id': 2, 'name': 'upper'},
                          {'supercategory': 'bipolar', 'id': 3, 'name': 'lower'}]
cocoanns['images'] = images
with open('ann_1021.json', 'w') as json_file:
    json.dump(cocoanns, json_file)
<file_sep>/README.md
# COCO style customized json file
First, make coco style json file.
1. Move to 'make_custom_json'
2. python coco_json.py
# COCO customized API
1. Move to 'cocoapi'
2. python pycococustom.py
|
1557ac093599ce710c7282a364d82130edec3bea
|
[
"Markdown",
"Python"
] | 2 |
Python
|
hutom-io/COCOcustom
|
e2ea8c47cadcc13098f52ecd40624d7d34c75d8d
|
2b7267f53bad9378dc6fa9c513b53b8d29a11dd3
|
refs/heads/master
|
<repo_name>LzCrazy/HTML<file_sep>/jd/js/index.js
// Hover behaviour for the main product navigation menu (jQuery).
$(document).ready(function() {
	// Highlight the hovered category row, clearing any previous highlight.
	$(".mainProNav dl dt").mouseover(function() {
		$(".mainProNav dl").removeClass("dlHover");
		$(this).parent().addClass("dlHover");
	})
	// Expand the menu while the pointer is inside it; collapse it and
	// clear the row highlight when the pointer leaves.
	$(".mainProNav").hover(function() {
		$(this).addClass("mainProNavHover");
	},function() {
		$(this).removeClass("mainProNavHover");
		$(".mainProNav dl").removeClass("dlHover");
	})
});
|
f187b25c57fd70c0d1f43066e1a7d55fd3b7f120
|
[
"JavaScript"
] | 1 |
JavaScript
|
LzCrazy/HTML
|
c2214bb13b7f831035a07a7e694f1747685510b6
|
56564ea9203c1f10f8780f4a105c362c407f3f94
|
refs/heads/master
|
<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using RestaurantAPI.Domain.Models;
using AppContext = RestaurantAPI.Persistence.AppContext;
namespace RestaurantAPI.Application.Repositories
{
    /// <summary>EF Core-backed repository for <see cref="Dish"/> entities.</summary>
    public class DishesRepository : IDishesRepository
    {
        private readonly AppContext _context;
        public DishesRepository(AppContext context)
        {
            _context = context;
        }
        /// <summary>Finds a dish by its primary key; null when absent.</summary>
        public async Task<Dish> GetAsync(string id)
        {
            return await _context.Dishes.FindAsync(id);
        }
        /// <summary>Returns all dishes.</summary>
        public async Task<IEnumerable<Dish>> GetDishesAsync()
        {
            return await _context.Dishes.ToListAsync();
        }
        /// <summary>Adds a dish and persists the change immediately.</summary>
        public async Task CreateDishAsync(Dish dish)
        {
            _context.Dishes.Add(dish);
            await _context.SaveChangesAsync();
        }
    }
}
<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Application.Repositories
{
    /// <summary>Persistence operations for <see cref="Order"/> aggregates.</summary>
    public interface IOrdersRepository
    {
        /// <summary>Fetches a single order by id.</summary>
        Task<Order> GetAsync(string id);
        /// <summary>Fetches orders; <paramref name="days"/> presumably limits
        /// results to the most recent days — confirm in the implementation.</summary>
        Task<IEnumerable<Order>> GetAllAsync(int? days);
        /// <summary>Persists changes to an existing order.</summary>
        Task UpdateAsync(Order order);
        /// <summary>Creates a new order and returns the stored entity.</summary>
        Task<Order> CreateAsync(Order order);
    }
}
<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using RestaurantAPI.Application.Services;
using RestaurantAPI.Domain.Models;
using RestaurantAPI.Domain.Requests;
namespace RestaurantAPI.Controllers
{
    /// <summary>REST endpoints for querying and mutating orders.</summary>
    [Route("api/orders")]
    [ApiController]
    public class OrdersController : ControllerBase
    {
        private readonly IOrderService _service;
        private readonly ILogger<OrdersController> _logger;
        public OrdersController(IOrderService service, ILogger<OrdersController> logger)
        {
            _service = service;
            _logger = logger;
        }
        /// <summary>GET api/orders — all orders, with an optional <paramref name="days"/> query filter.</summary>
        [HttpGet]
        public async Task<ActionResult<IEnumerable<Order>>> GetOrdersAsync([FromQuery] int? days)
        {
            _logger.LogDebug($"New request to get all orders");
            if (days.HasValue)
            {
                _logger.LogDebug($"{nameof(days)}: {days.Value}");
            }
            IEnumerable<Order> result = await _service.GetAllAsync(days);
            return Ok(result);
        }
        /// <summary>GET api/orders/{id} — a single order by id.</summary>
        [HttpGet("{id}")]
        public async Task<ActionResult<Order>> GetAsync(string id)
        {
            _logger.LogDebug($"New request get order by ID, orderId: {id}");
            Order result = await _service.GetAsync(id);
            return Ok(result);
        }
        /// <summary>PUT api/orders/{orderId}/dishes/{dishId} — adds a dish to an order; 204 on success.</summary>
        [HttpPut("{orderId}/dishes/{dishId}")]
        public async Task<ActionResult<Order>> AddDishToOrderAsync(string orderId, string dishId)
        {
            _logger.LogDebug($"New request add a dish to an order as paid, orderId: {orderId}, dishId: {dishId}");
            await _service.AddDishToOrderAsync(orderId, dishId);
            return NoContent();
        }
        /// <summary>PUT api/orders/{orderId}/payment — marks an order as paid; 204 on success.</summary>
        [HttpPut("{orderId}/payment")]
        public async Task<ActionResult<Order>> MarkPaidAsync(string orderId)
        {
            _logger.LogDebug($"New request to mark order as paid, orderId: {orderId}");
            await _service.MarkPaidAsync(orderId);
            return NoContent();
        }
        /// <summary>POST api/orders — creates a new order from the request body; 204 on success.</summary>
        [HttpPost]
        public async Task<ActionResult<Order>> CreateOrderAsync(CreateOrderRequest request)
        {
            _logger.LogDebug($"New request to create a new order, request: {request}");
            await _service.CreateOrderAsync(request.IsPaid, request.Dishes);
            return NoContent();
        }
    }
}<file_sep>using Microsoft.AspNetCore.Mvc;
using System;
using System.Net;
using Microsoft.AspNetCore.Diagnostics;
using RestaurantAPI.Exceptions;
namespace RestaurantAPI.Controllers
{
/// <summary>
/// Central error endpoint targeted by the exception-handler middleware;
/// translates known exception types into HTTP status codes.
/// </summary>
[ApiExplorerSettings(IgnoreApi = true)]
[ApiController]
public class ErrorsController : ControllerBase
{
[Route("error")]
public IActionResult Error()
{
var errorDetails = new ErrorDetails();
// The exception captured by the exception-handler middleware.
var context = HttpContext.Features.Get<IExceptionHandlerFeature>();
Exception exception = context.Error;
switch (exception)
{
case BadRequestException:
{
errorDetails.Message = exception.Message;
errorDetails.StatusCode = (int)HttpStatusCode.BadRequest;
return BadRequest(errorDetails);
}
case NotFoundException:
{
errorDetails.Message = exception.Message;
errorDetails.StatusCode = (int)HttpStatusCode.NotFound;
return NotFound(errorDetails);
}
default:
{
// Anything unrecognized becomes a generic 500 problem response.
return Problem(exception.Message, null, 500);
}
}
}
}
}<file_sep>using System;
using System.Diagnostics;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace RestaurantAPI
{
/// <summary>Application entry point: builds and runs the ASP.NET Core host.</summary>
public class Program
{
    public static void Main(string[] args)
    {
        CreateHostBuilder(args).Build().Run();
    }

    /// <summary>
    /// Configures the host: single-line console logging plus a per-run debug
    /// log file named after the current process.
    /// </summary>
    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .ConfigureLogging(builder =>
            {
                builder.AddSimpleConsole(options =>
                {
                    options.IncludeScopes = false;
                    options.SingleLine = true;
                    // Fixed format specifiers: the original "dd/mm/yyyy hh:mm:ss.s "
                    // rendered minutes in the month slot ("mm" is minutes, "MM" is
                    // months) and used a 12-hour clock without an AM/PM designator.
                    options.TimestampFormat = "dd/MM/yyyy HH:mm:ss.fff ";
                });
                var currentProcess = Process.GetCurrentProcess();
                builder.AddFile($"Logs/RestaurantAPI_{DateTime.Now:yyyy-MM-dd_HH-mm-ss}_{currentProcess.ProcessName}_{currentProcess.Id}.log", LogLevel.Debug);
            })
            .ConfigureWebHostDefaults(webBuilder =>
            {
                webBuilder.UseStartup<Startup>();
            });
}
}<file_sep># RestaurantAPI
## Description
This project is a **.NET 5** implemented Web API for a small restaurant management.
Uses the RESTful api rules and clean architecture.
**RestaurantAPI** enables communication with any database using **Entity Framework Core** consisting of sending and receiving data regarding orders and dishes, the database that is being used is a local sql database.
## Stack
It uses **Entity Framework Core** to communicate with a database, which contains required data tables like:
* Orders - where information about orders are stored
* Dishes - where information about dishes are stored
## How to use
* Open the solution and run it.
* You can use Swagger, Postman, or any other HTTP client to send GET and POST requests to the API.
* There is the following functionality :
* POST a new dish to the menu(/api/dishes),
* GET what dishes are on the menu(/api/dishes),
* GET a dish by id(/api/dishes/{id}),
* POST a new order(/api/orders),
* GET all orders(/api/orders) with an option to check orders from the past by adding the amount of days to go back,
* GET order by id(/api/orders/{id}),
* PUT a new dish in an order (/api/orders/{orderId}/dishes/{dishId}),
* PUT to change the order paid status from false to true(/api/orders/{orderId}/payment)
<file_sep>using Microsoft.AspNetCore.Mvc;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using RestaurantAPI.Application.Services;
using RestaurantAPI.Domain.Models;
using RestaurantAPI.Domain.Requests;
namespace RestaurantAPI.Controllers
{
/// <summary>HTTP endpoints for reading and creating menu dishes.</summary>
[Route("api/dishes")]
[ApiController]
public class DishesController : ControllerBase
{
// Business-logic layer the controller delegates to.
private readonly IDishesService _service;
private readonly ILogger<DishesController> _logger;
public DishesController(IDishesService service, ILogger<DishesController> logger)
{
_service = service;
_logger = logger;
}
/// <summary>GET api/dishes — every dish on the menu.</summary>
[HttpGet]
public async Task<IActionResult> GetAllAsync()
{
_logger.LogDebug($"New request to get all dishes");
IEnumerable<Dish> result = await _service.GetAllAsync();
return Ok(result);
}
/// <summary>GET api/dishes/{id} — one dish by id.</summary>
[HttpGet("{id}")]
public async Task<IActionResult> GetAsync(string id)
{
_logger.LogDebug($"New request get dish by ID, dishId: {id}");
Dish result = await _service.GetAsync(id);
return Ok(result);
}
/// <summary>POST api/dishes — adds a new dish to the menu.</summary>
[HttpPost]
public async Task<IActionResult> CreateDishAsync(CreateDishRequest request)
{
_logger.LogDebug($"New request to create a new dish, request: {request}");
await _service.CreateDishAsync(request.Name, request.Price);
return NoContent();
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.ChangeTracking;
using Microsoft.Extensions.Logging;
using RestaurantAPI.Domain.Models;
using AppContext = RestaurantAPI.Persistence.AppContext;
namespace RestaurantAPI.Application.Repositories
{
/// <summary>EF Core-backed repository for <see cref="Order"/> entities.</summary>
public class OrdersRepository : IOrdersRepository
{
private readonly AppContext _context;
public OrdersRepository(AppContext context)
{
_context = context;
}
/// <summary>Loads one order, eagerly including its dishes; null when not found.</summary>
public async Task<Order> GetAsync(string id)
{
return await _context.Orders
.Include(x => x.Dishes)
.SingleOrDefaultAsync(x => x.Id == id);
}
/// <summary>
/// Loads all orders (with dishes). When <paramref name="days"/> is given, only
/// orders created within the last <paramref name="days"/> days are returned.
/// </summary>
public async Task<IEnumerable<Order>> GetAllAsync(int? days)
{
IQueryable<Order> query = _context.Orders.Include(order => order.Dishes);
if (days.HasValue)
{
// NOTE(review): DateTime.Now is the web server's local clock — confirm it
// matches the timezone semantics of the stored CreatedTime values.
query = query.Where(order => order.CreatedTime >= DateTime.Now.AddDays(-days.Value));
}
return await query.ToListAsync();
}
/// <summary>Marks the whole entity as modified and commits.</summary>
public async Task UpdateAsync(Order order)
{
_context.Entry(order).State = EntityState.Modified;
await _context.SaveChangesAsync();
}
/// <summary>Inserts a new order and returns the tracked entity (with any store-generated values).</summary>
public async Task<Order> CreateAsync(Order order)
{
EntityEntry<Order> result = await _context.Orders.AddAsync(order);
await _context.SaveChangesAsync();
return result.Entity;
}
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using RestaurantAPI.Application.Repositories;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Application.Services
{
/// <summary>
/// Business logic for menu dishes; persistence is delegated to
/// <see cref="IDishesRepository"/>.
/// </summary>
public class DishesService : IDishesService
{
    private readonly IDishesRepository _repository;
    private readonly ILogger<IDishesService> _logger;

    public DishesService(IDishesRepository repository, ILogger<IDishesService> logger)
    {
        _repository = repository;
        _logger = logger;
    }

    /// <summary>Fetches every dish on the menu.</summary>
    public async Task<IEnumerable<Dish>> GetAllAsync() => await _repository.GetDishesAsync();

    /// <summary>Fetches a single dish by its identifier.</summary>
    public async Task<Dish> GetAsync(string id) => await _repository.GetAsync(id);

    /// <summary>Creates and persists a new dish, then logs its generated identifier.</summary>
    public async Task CreateDishAsync(string name, double price)
    {
        var newDish = new Dish { Name = name, Price = price };
        await _repository.CreateDishAsync(newDish);
        _logger.LogInformation($"Dish has been created with id '{newDish.Id}'");
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace RestaurantAPI.Domain.Models
{
/// <summary>A customer order persisted in the Orders table.</summary>
public class Order
{
// NOTE(review): Identity generation on a string key depends on provider
// support — confirm ids are actually generated rather than left null.
[Key]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public string Id { get; set; }
// Sum of the prices of all dishes in the order (recomputed by OrderService).
public double TotalPrice { get; set; }
// Creation timestamp (assigned with DateTime.Now in OrderService).
public DateTime CreatedTime { get; set; }
// True once the order has been paid.
public bool IsPaid { get; set; }
// Navigation collection of the dishes contained in the order.
public ICollection<Dish> Dishes { get; set; }
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Application.Repositories
{
/// <summary>Persistence operations for <c>Dish</c> entities.</summary>
public interface IDishesRepository
{
/// <summary>One dish by primary key; null when absent.</summary>
Task<Dish> GetAsync(string id);
/// <summary>All dishes.</summary>
Task<IEnumerable<Dish>> GetDishesAsync();
/// <summary>Persists a new dish.</summary>
Task CreateDishAsync(Dish dish);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using RestaurantAPI.Application.Repositories;
using RestaurantAPI.Domain.Models;
using RestaurantAPI.Exceptions;
namespace RestaurantAPI.Application.Services
{
/// <summary>
/// Business logic for orders: lookups, adding dishes, payment status and creation.
/// </summary>
public class OrderService : IOrderService
{
    private readonly IOrdersRepository _ordersRepository;
    private readonly IDishesRepository _dishesRepository;
    private readonly ILogger<IOrderService> _logger;

    public OrderService(IOrdersRepository ordersRepository, IDishesRepository dishesRepository, ILogger<IOrderService> logger)
    {
        _ordersRepository = ordersRepository;
        _dishesRepository = dishesRepository;
        _logger = logger;
    }

    /// <summary>Returns one order by id (null when the repository finds none).</summary>
    public async Task<Order> GetAsync(string id)
    {
        return await _ordersRepository.GetAsync(id);
    }

    /// <summary>Returns all orders, optionally limited to the last <paramref name="days"/> days.</summary>
    public async Task<IEnumerable<Order>> GetAllAsync(int? days)
    {
        return await _ordersRepository.GetAllAsync(days);
    }

    /// <summary>Adds an existing dish to an existing order and recomputes the order total.</summary>
    /// <exception cref="NotFoundException">The order or the dish does not exist.</exception>
    public async Task AddDishToOrderAsync(string orderId, string dishId)
    {
        Order order = await GetExistingOrderAsync(orderId);

        Dish dish = await _dishesRepository.GetAsync(dishId);
        if (dish == null)
        {
            var error = $"Dish '{dishId}' could not be found";
            _logger.LogError(error);
            throw new NotFoundException(error);
        }

        order.Dishes.Add(dish);
        // Recompute from scratch so the total always matches the dish list.
        order.TotalPrice = order.Dishes.Sum(d => d.Price);
        await _ordersRepository.UpdateAsync(order);
        _logger.LogInformation($"Dish '{dish.Name}' has been added to orderId '{orderId}'");
        _logger.LogInformation($"Order totalPrice has been updated to '{order.TotalPrice}'");
    }

    /// <summary>Flags an order as paid.</summary>
    /// <exception cref="NotFoundException">The order does not exist.</exception>
    public async Task MarkPaidAsync(string orderId)
    {
        Order order = await GetExistingOrderAsync(orderId);
        order.IsPaid = true;
        await _ordersRepository.UpdateAsync(order);
        // Typo fix: original message read "has been mark as paid".
        _logger.LogInformation($"Order '{orderId}' has been marked as paid");
    }

    /// <summary>Creates and persists a new order for the given dishes.</summary>
    public async Task CreateOrderAsync(bool isPaid, ICollection<Dish> dishes)
    {
        var order = new Order
        {
            Dishes = dishes,
            IsPaid = isPaid,
            CreatedTime = DateTime.Now,
            TotalPrice = dishes.Sum(dish => dish.Price)
        };
        Order createdOrder = await _ordersRepository.CreateAsync(order);
        _logger.LogInformation($"Order has been created with id '{createdOrder.Id}'");
    }

    /// <summary>
    /// Shared lookup used by the mutation paths: loads an order, or logs and
    /// throws <see cref="NotFoundException"/> when it is missing. This removes
    /// the duplicated null-check blocks from the original implementation.
    /// </summary>
    private async Task<Order> GetExistingOrderAsync(string orderId)
    {
        Order order = await _ordersRepository.GetAsync(orderId);
        if (order == null)
        {
            var error = $"Order '{orderId}' could not be found";
            _logger.LogError(error);
            throw new NotFoundException(error);
        }
        return order;
    }
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Application.Services
{
/// <summary>Business-logic contract for working with orders.</summary>
public interface IOrderService
{
/// <summary>One order by id.</summary>
Task<Order> GetAsync(string id);
/// <summary>All orders, optionally limited to the last <paramref name="days"/> days.</summary>
Task<IEnumerable<Order>> GetAllAsync(int? days);
/// <summary>Adds an existing dish to an existing order and updates its total.</summary>
Task AddDishToOrderAsync(string orderId, string dishId);
/// <summary>Marks an order as paid.</summary>
Task MarkPaidAsync(string orderId);
/// <summary>Creates a new order containing the given dishes.</summary>
Task CreateOrderAsync(bool isPaid, ICollection<Dish> dishes);
}
}<file_sep>using System.Collections.Generic;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Domain.Requests
{
/// <summary>Payload for creating a new order (POST /api/orders).</summary>
public class CreateOrderRequest
{
    // Whether the order is already paid at creation time.
    public bool IsPaid { get; set; }

    // Dishes included in the new order.
    public ICollection<Dish> Dishes { get; set; }

    /// <summary>Log-friendly rendering used by OrdersController request logging.</summary>
    public override string ToString()
    {
        // Null-conditional guard: model binding can leave Dishes null, and this
        // method is called from logging before any validation runs.
        return $"{nameof(IsPaid)}: {IsPaid}, Dishes Count: {Dishes?.Count ?? 0}";
    }
}
}<file_sep>using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.OpenApi.Models;
using RestaurantAPI.Application.Repositories;
using RestaurantAPI.Application.Services;
using RestaurantAPI.Persistence;
namespace RestaurantAPI.Extensions
{
/// <summary>DI and infrastructure registration helpers used from Startup.</summary>
public static class ServiceExtensions
{
/// <summary>Registers the business-logic services.</summary>
public static void AddMyServices(this IServiceCollection services)
{
services.AddScoped<IOrderService, OrderService>();
services.AddScoped<IDishesService, DishesService>();
}
/// <summary>Registers the persistence repositories.</summary>
public static void AddMyRepositories(this IServiceCollection services)
{
services.AddScoped<IDishesRepository, DishesRepository>();
services.AddScoped<IOrdersRepository, OrdersRepository>();
}
/// <summary>Configures the EF Core context against the "Default" connection string.</summary>
public static void AddMyDatabase(this IServiceCollection services, IConfiguration config)
{
services.AddDbContext<AppContext>(options =>
{
options.UseSqlServer(config.GetConnectionString("Default"));
});
}
/// <summary>Registers Swagger document generation for the v1 API.</summary>
public static void AddMySwagger(this IServiceCollection services)
{
services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo {Title = "Restaurant Api", Version = "v1"});
});
}
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using RestaurantAPI.Domain.Models;
namespace RestaurantAPI.Application.Services
{
/// <summary>Business-logic contract for menu dishes.</summary>
public interface IDishesService
{
/// <summary>Every dish on the menu.</summary>
Task<IEnumerable<Dish>> GetAllAsync();
/// <summary>One dish by id.</summary>
Task<Dish> GetAsync(string id);
/// <summary>Creates and persists a new dish.</summary>
Task CreateDishAsync(string name, double price);
}
}<file_sep>namespace RestaurantAPI.Persistence
{
// Placeholder for database seeding logic; currently empty and unused.
class Seed
{
}
}
<file_sep>namespace RestaurantAPI.Domain.Requests
{
/// <summary>Payload for creating a new dish (POST /api/dishes).</summary>
public class CreateDishRequest
{
    // Display name of the dish.
    public string Name { get; set; }

    // Price of the dish.
    public double Price { get; set; }

    /// <summary>
    /// Log-friendly rendering. Added for consistency with CreateOrderRequest:
    /// DishesController logs $"request: {request}", which previously printed
    /// only the type name.
    /// </summary>
    public override string ToString()
    {
        return $"{nameof(Name)}: {Name}, {nameof(Price)}: {Price}";
    }
}
}
|
9b8df364509f14eca1c68da18048f401b3d9e4fc
|
[
"Markdown",
"C#"
] | 18 |
C#
|
benda-cyber/RestaurantAPI
|
a226a60218bdf20691cd5561d01130d1e74ddc7f
|
3716e85a2b1b21683aa52887aa14f77f73d5b5c6
|
refs/heads/master
|
<file_sep>package exercise3;
public class ConvexPolygon {
//private Point vertices;
public double perimeter(){
return;
};
public double area(){
return;
};
public ConvexPolygon(Point vertices){
//this.vertices=vertices;
}
public ConvexPolygon(){};
}
<file_sep>package exercise3;
/** Axis-aligned rectangle stub; geometry is not implemented yet. */
public class Rectangle extends ConvexPolygon {
/** TODO: store the two corner points and derive the four vertices. */
Rectangle(Point upperLeft,Point lowerRigth){
};
Rectangle(){};
}
<file_sep>package exercise3;
/** Circle stub built on the Ellipsis base class (defined elsewhere in the project). */
public class Circle extends Ellipsis{
/** TODO: forward center and radius to the Ellipsis constructor. */
Circle(Point center, int radius){}
}
<file_sep>package exercise2;
/** Tracks a population of persons via static counters updated on construction. */
class Person {
    // Age of this person, in years.
    private int age;
    // Number of Person instances created since the last reset.
    private static int popSize = 0;
    // Running total of the ages of all created persons.
    private static long sumAge = 0;

    /** Registers a new person and updates the population statistics. */
    public Person(int age) {
        this.age = age;
        popSize++;
        sumAge += age;
    }

    /** @return how many persons have been created since the last reset */
    public static int computePopulationSize() {
        return popSize;
    }

    /** @return mean age of the population (NaN when the population is empty) */
    public static float computeAveragePopulationAge() {
        return (float) sumAge / popSize;
    }

    /** Clears all population statistics. */
    public static void resetPopulation() {
        popSize = 0;
        sumAge = 0;
    }
}
|
2b15ab30a6c00b2b27234a5f11ffe08a30f24b1b
|
[
"Java"
] | 4 |
Java
|
umons-polytech-odl-2018/odl-tp1-l-m-s-vansnick-maes
|
de38a591117e7065d857e92b6f7b612da45ad0ce
|
fcfacffe35c6e1f52f080a031a32a6b4d6db018d
|
refs/heads/master
|
<file_sep>#' A function to join GPDD time series data with temperature data.
#'
#' Joins the package temperature table with the GPDD metadata, matching rows on
#' the LocationID index shared by both sources.
#' @param location Include the gpdd_location metadata table? Default is FALSE.
#' @param main Include the gpdd_main time-series metadata table? Default is TRUE.
#' @examples
#' data <- gpdd_location_temperature() # temperature joined with the main table
#' head(data)
#' data_loc <- gpdd_location_temperature(location=TRUE, main=TRUE) # add location metadata
#' head(data_loc)
#' @import dplyr
#' @import rgpdd
#' @export
gpdd_location_temperature <- function(location=FALSE,main=TRUE){
  if (location == TRUE && main == FALSE) {
    # Location metadata with temperatures only.
    df <- dplyr::left_join(rgpdd::gpdd_location, rgpddPlusTraits::temperature, by = "LocationID")
  } else if (location == TRUE) {
    # Both requested: temperature onto main, then location metadata in front.
    df <- dplyr::left_join(rgpddPlusTraits::temperature, rgpdd::gpdd_main, by = "LocationID")
    df <- dplyr::left_join(rgpdd::gpdd_location, df, by = "LocationID")
  } else if (main == TRUE) {
    df <- dplyr::left_join(rgpddPlusTraits::temperature, rgpdd::gpdd_main, by = "LocationID")
  } else {
    # Nothing requested: keep the legacy sentinel value returned by the
    # original implementation.
    df <- 0
  }
  return(df)
}<file_sep>#' @name life_history
#' @title Life history data for populations in the GPDD
#' @description This is data on life history traits for mammal, bird, and fish species in the global population dynamics database (GPDD).
#' These trait data are an average for each species, and are intended for across taxa comparative analyses. Data come from published
#' compilations. See life_history_sources for references for trait values of each species.
#'
#' These data can be linked to the time series data through the TaxonID number. See http://www3.imperial.ac.uk/cpb/databases/gpdd for more
#' information on the GPDD metadata organization.
#'
#' @return Mass_kg: Average adult body mass in kilograms. When females and males differ in mass, only the female size is included.
#' @return Temp: Average body temperature. For fish, the body temperature is assumed to be equal to preferred temperature. When species specific temperatures of mammals and birds are unavailable, the average temperature of all mammals or birds is given.
#' @return Class_or_species: Indicates whether the body temperature estimate comes from the taxonomic level of Class or of Species, or, for fish, from a preferred temperature.
#' @return MSMR_W_g: Average metabolic rate per unit body mass (i.e. whole organism metabolic rate divided by adult body mass), in units of Watts per gram.
#' @return Average_Lifespan_years: This is calculated as the inverse of the average natural mortality rate, and is an estimate of the average lifespan of the species in nature, in units of years.
#' @return Lifespan: This is more nearly the maximum lifespan in the wild, units of years.
#' @return age_first_reproduction_years: The average age of first reproduction of females. Units of years.
#' @return PrimaryDiet: Rough classification based on diet studies. No objective methods were used to classify species. Only included for mammals and birds.
#' @return TrophicLevel: Quantitative classification based on diet studies, collected from FishBase. Only included for Fish.
#'
#'
#' @name life_history
#' @docType data
#' @author <NAME> \email{<EMAIL>}
NULL
<file_sep>#' @name temperature
#' @title Average monthly and annual temperature data for populations in the GPDD
#' @description This is data on average monthly and annual temperature for each population in the global population dynamics database (GPDD).
#' Location metadata in the GPDD is indexed with a unique LocationID number. I have calculated current temperatures within
#' 0.1 decimal degrees latitude and longitude for each location. If data were unavailable within 0.1 decimal degrees, temperatures were
#' pulled from 0.5 decimal degrees. The column "close_class" specifies whether data comes from within 0.1 or 0.5 decimal degrees of the population.
#' All data come from WorldClim current temperature conditions, which are derived from temperature data between 1960-1990. These data are useful
#' for terrestrial populations.
#' Reference: <NAME>., <NAME>, <NAME>, <NAME> and <NAME>, 2005. Very high resolution interpolated climate surfaces
#' for global land areas. International Journal of Climatology 25: 1965-1978.
#'
#' These data can be linked to the time series data through the LocationID number. See http://www3.imperial.ac.uk/cpb/databases/gpdd for more
#' information on the GPDD metadata organization.
#'
#' @name temperature
#' @docType data
#' @author <NAME> \email{<EMAIL>}
#' @references \url{http://www.worldclim.org/version1}
NULL<file_sep>#' A function to join GPDD time series data with trait data.
#'
#' Joins the package life-history table with the GPDD metadata, matching rows on
#' the TaxonID index. Optional tables are layered on with left joins in the same
#' order as the original nested implementation, so the resulting columns match.
#' @param taxon Include taxonomic information from the gpdd_taxon data table? Default is FALSE.
#' @param main Include information on time series from the gpdd_main data table? Default is TRUE.
#' @param sources Include information on data sources for the life history data? Default is FALSE.
#' @examples data <- gpdd_taxon_life_history()
#' head(data)
#' data_taxon_sources <- gpdd_taxon_life_history(taxon=TRUE, main=FALSE, sources=TRUE)
#' head(data_taxon_sources)
#' @import dplyr
#' @import rgpdd
#' @export
gpdd_taxon_life_history <- function(taxon=FALSE, main=TRUE, sources=FALSE){
  # Nothing requested: keep the legacy sentinel returned by the original code.
  if (taxon == FALSE && main == FALSE && sources == FALSE) {
    return(0)
  }
  # Start from the life-history table, optionally prefixed with taxonomy.
  df <- rgpddPlusTraits::life_history
  if (taxon == TRUE) {
    df <- dplyr::left_join(rgpdd::gpdd_taxon, df, by = "TaxonID")
  }
  if (main == TRUE) {
    df <- dplyr::left_join(df, rgpdd::gpdd_main, by = "TaxonID")
  }
  if (sources == TRUE) {
    df <- dplyr::left_join(df, rgpddPlusTraits::life_history_sources, by = "TaxonID")
  }
  return(df)
}<file_sep># rgpddPlusTraits
The purpose of this package is to make available species-specific life history trait information for mammal, bird, and fish species in the global population dynamics database (GPDD). In addition, location-specific data on average monthly and annual temperature are included in a separate data file. These data are indexed with identification numbers for the GPDD metadata tables. This package is designed to interface with the package 'rgpdd,' which contains all data tables from the original GPDD. Functions were created in order to easily join life history and temperature data with individual time series data and metadata from the 'rgpdd'.
Life history data were assembled manually from various published sources, however this data compilation is not itself published. The developers of the Global Population Dynamics Database and the R package 'rgpdd' are not responsible for the accuracy of this data.
For more information on the Global Population Dynamics Database and metadata organization, see http://www3.imperial.ac.uk/cpb/databases/gpdd.
For more information on the package 'rgpdd,' see https://github.com/ropensci/rgpdd.
## Quickstart
### Install and load package:
```
library(devtools)
devtools::install_github("daan4786/rgpddPlusTraits")
library(rgpddPlusTraits)
# While rgpddPlusTraits has functions to integrate the trait/temperature data with the GPDD meta data tables,
# in order to work with the actual time series data you must load the package rgpdd.
devtools::install_github("ropensci/rgpdd")
library(rgpdd)
library(dplyr)
library(ggplot2)
```
### Integrate life history data with gpdd main table using function available in rgpddPlusTraits:
```
life_history <- gpdd_taxon_life_history(taxon=TRUE, main=TRUE, sources=FALSE)
```
### An example analysis looking at the effect of body mass on average abundance of mammals, birds, and fish:
```
# Calculate average population size of each time series
avg_abundance <- rgpdd::gpdd_data %>% group_by(MainID) %>% summarize(avg_abundance = mean(PopulationUntransformed))
# Join with life history data for plotting
avg_abundance_life_history <- dplyr::left_join(avg_abundance, life_history, by = "MainID")
ggplot(dplyr::filter(avg_abundance_life_history, DatasetLength > 20, (TaxonomicClass == "Mammalia" | TaxonomicClass == "Aves" | TaxonomicClass == "Osteichthyes"))) +
geom_point(aes(x = Mass_kg, y = avg_abundance, color = TaxonomicClass)) +
geom_smooth(aes(x = Mass_kg, y = avg_abundance), method = "lm", se = F, color = "black") +
scale_x_log10() + scale_y_log10() + ylab("Average abundance (variable spatial units)") + xlab("Body Mass (kg)")
```

|
6bffc90cdc039fb810ebc4b004b72e757183e79d
|
[
"Markdown",
"R"
] | 5 |
R
|
daan4786/rgpddPlusTraits
|
0586493569ffda9398ec033a76e3920908b38149
|
2b82f45c08ca04808b4fa214f5c897013f895ed9
|
refs/heads/master
|
<file_sep>local shortport = require "shortport"
local http = require("http")
local string = require("string")
description = [[ifconfig.me External IP Lookup]]
author = "Red5d"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
categories = {"external"}
-- No host/port/etc requirements for running this script
prerule = function()
-- Always run: this script needs no target host or open port.
return true
end
action = function(host, port)
  -- HTTPS GET request to https://ifconfig.me/ (declared local: the original
  -- leaked `res` and `ipaddr` checks relied on a successful response).
  local res = http.get("ifconfig.me", 443, "/")
  if not (res and res.body) then
    return "Failed to retrieve a response from ifconfig.me"
  end
  -- Extract the IP address from the page markup.
  local ipaddr = res.body:match("ip_address\">([^<]+)")
  if not ipaddr then
    -- Previously a nil match crashed the concatenation below.
    return "Could not parse an IP address from the response"
  end
  -- Return a string with the IP address.
  return "External IP Address: " .. ipaddr
end
<file_sep>local shortport = require "shortport"
http = require "http"
stdnse = require "stdnse"
description = [[CVE-2017-12542 HP iLO firmware vuln scanner]]
author = "Red5d"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
categories = {"discovery"}
portrule = shortport.http
action = function(host, port)
  -- Fetch the unauthenticated iLO XML status page.
  local res = http.get(host.ip, 443, "/xmldata?item=ALL")
  if not (res and res.body) then
    return nil
  end
  -- The firmware revision lives in the <FWRI> element; guard against pages
  -- that do not contain it (the original indexed nil and crashed).
  local parts = stdnse.strsplit("<FWRI>", res.body)
  if not parts[2] then
    return nil
  end
  local version = stdnse.strsplit("</FWRI>", parts[2])[1]
  local output = {}
  table.insert(output, "HP iLO Firmware Version: " .. version)
  -- Vulnerable range per the original check: firmware 2.3 through 2.5.
  local num = tonumber(version)
  if num and 2.3 <= num and num <= 2.5 then
    table.insert(output, "Vulnerable: yes")
  else
    table.insert(output, "Vulnerable: no")
  end
  return output
end
<file_sep># nse-scripts
My Nmap scripts for the Nmap Scripting Engine
<file_sep>local http = require "http"
local shortport = require "shortport"
local stdnse = require "stdnse"
description = [[Southeast LinuxFest Example]]
-- Usage: nmap --script selinuxfest southeastlinuxfest.org -p 443
author = "Red5d"
license = "Same as Nmap--See https://nmap.org/book/man-legal.html"
categories = {"discovery", "safe"}
-- Only run when http-related ports are included
portrule = shortport.http
action = function(host, port)
  -- Perform HTTP GET request (declared local: the original leaked `resp`
  -- into the global environment and crashed when the request failed).
  local resp = http.get( host, port, "/")
  if not (resp and resp.body) then
    return nil
  end
  -- Regex on the response body to find info; a failed match yields nil,
  -- which simply leaves the corresponding output field unset.
  local latest_post = resp.body:match("bookmark\">([^<]+)")
  local last_updated = resp.body:match("timestamp updated\">([^<]+)")
  local author = resp.body:match('posts by ([^"]+)')
  -- Create an output table and load the info into it.
  local output_tab = stdnse.output_table()
  output_tab.latest_post = latest_post
  output_tab.last_updated = last_updated
  output_tab.author = author
  -- Return the output table.
  return output_tab
end<file_sep>local shortport = require "shortport"
local http = require("http")
local stringaux = require("stringaux")
description = [[Tasmota device name and version scanner]]
author = "Red5d"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
categories = {"discovery"}
portrule = shortport.http
action = function(host, port)
  -- Fetch the device root page (locals added: the original leaked `res`,
  -- `output`, etc. and crashed on nil matches).
  local res = http.get(host.ip, 80, "/")
  if not (res and res.body) then
    return nil
  end
  if http.response_contains(res, "Tasmota") then
    local name = res.body:match("<h2>([^<]+)<")
    local moduleType = res.body:match("<h3>([^<]+)<")
    -- The version sits between "Tasmota " and " by " in the page footer.
    local version
    local versionline = stringaux.strsplit("Tasmota ", res.body)[2]
    if versionline then
      version = stringaux.strsplit(" by ", versionline)[1]
    end
    local output = {}
    -- "unknown" fallbacks avoid the nil-concatenation crash of the original
    -- when the page layout differs from the expected markup.
    table.insert(output, "Name: " .. (name or "unknown"))
    table.insert(output, "Module: " .. (moduleType or "unknown"))
    table.insert(output, "Version: " .. (version or "unknown"))
    return output
  end
end
|
4f231ae39c89f66413cbd50b84abd458ce1c5308
|
[
"Markdown",
"Lua"
] | 5 |
Lua
|
Red5d/nse-scripts
|
8d2ac16557adc55e37f77dae72d3d204ffddc868
|
06be716cf3c53d1fc80161a05da1a4a8c6f5ef54
|
refs/heads/master
|
<file_sep># --- Created by Ebean DDL
# To stop Ebean DDL generation, remove this comment and start using Evolutions
# --- !Ups
-- Games referenced by scores; the game name is the natural key.
create table jeu (
nom varchar(255) not null,
constraint pk_jeu primary key (nom))
;
-- News items written by users; type_n stores the News.TypeNews category
-- (constrained to 0, 1 or 2 below).
create table news (
id integer not null,
titre varchar(255),
contenu varchar(255),
type_n integer,
auteur_email varchar(255),
constraint ck_news_type_n check (type_n in (0,1,2)),
constraint pk_news primary key (id))
;
-- Projects, linked to users through the project_user join table.
create table project (
id bigint not null,
name varchar(255),
folder varchar(255),
constraint pk_project primary key (id))
;
-- A score achieved by a user (auteur) in a game (jeu).
create table score (
id integer not null,
valeur float,
auteur_email varchar(255),
jeu_nom varchar(255),
constraint pk_score primary key (id))
;
-- Application users, keyed by email address.
create table user (
email varchar(255) not null,
name varchar(255),
-- Restored: this column definition was corrupted ("password <PASSWORD>),") in
-- the published file; varchar(255) matches every other Ebean-generated string
-- column in this schema.
password varchar(255),
is_admin boolean,
constraint pk_user primary key (email))
;
-- Join table for the many-to-many Project <-> User relation.
create table project_user (
project_id bigint not null,
user_email varchar(255) not null,
constraint pk_project_user primary key (project_id, user_email))
;
-- Sequences backing the generated primary keys.
create sequence jeu_seq;
create sequence news_seq;
create sequence project_seq;
create sequence score_seq;
create sequence user_seq;
-- Foreign keys and their supporting indexes.
alter table news add constraint fk_news_auteur_1 foreign key (auteur_email) references user (email) on delete restrict on update restrict;
create index ix_news_auteur_1 on news (auteur_email);
alter table score add constraint fk_score_auteur_2 foreign key (auteur_email) references user (email) on delete restrict on update restrict;
create index ix_score_auteur_2 on score (auteur_email);
alter table score add constraint fk_score_jeu_3 foreign key (jeu_nom) references jeu (nom) on delete restrict on update restrict;
create index ix_score_jeu_3 on score (jeu_nom);
alter table project_user add constraint fk_project_user_project_01 foreign key (project_id) references project (id) on delete restrict on update restrict;
alter table project_user add constraint fk_project_user_user_02 foreign key (user_email) references user (email) on delete restrict on update restrict;
# --- !Downs
-- Reverse migration: drop everything created above, with referential
-- integrity checks disabled while tables are removed.
SET REFERENTIAL_INTEGRITY FALSE;
drop table if exists jeu;
drop table if exists news;
drop table if exists project;
drop table if exists project_user;
drop table if exists score;
drop table if exists user;
SET REFERENTIAL_INTEGRITY TRUE;
drop sequence if exists jeu_seq;
drop sequence if exists news_seq;
drop sequence if exists project_seq;
drop sequence if exists score_seq;
drop sequence if exists user_seq;
<file_sep>@(user: User, message: String, scoresList: List[Score])
@* Personal area ("Espace perso"): shows the logged-in user's email, a
   logout link, and the list of the user's scores (game name and value). *@
@page("Nsih - Espace perso") {
<h1>Espace personnel</h1>
@if(user != null) {
Connecté en tant que @user.email.
}
<br>
<a href="@routes.Application.logout()">Se déconnecter</a>
<br><br>
Vos scores:
<br>
<ul>
@for(score <- scoresList) {
<li>@score.jeu.nom, @score.valeur</li>
}
</ul>
}<file_sep>nsih
====
Rendu le 8 Juin 2013
====
<NAME>, <NAME>
<file_sep>package models;
import javax.persistence.*;
import play.db.ebean.*;
import com.avaje.ebean.*;
@Entity
public class News extends Model {
/** Category of a news item; the schema constrains the stored type_n column to 0-2. */
public enum TypeNews {
INFO,
EVENT,
SCORES
}
@Id
public int id;
// Headline of the news item.
public String titre;
// Body text.
public String contenu;
// Category (see TypeNews).
public TypeNews typeN;
// Author; persisted as a foreign key to user.email in the schema.
@ManyToOne
public User auteur;
/** Builds a news item; auteur is left unset and assigned separately. */
public News(int id, String titre, String contenu, TypeNews type) {
this.id = id;
this.titre = titre;
this.contenu = contenu;
this.typeN = type;
}
// Ebean finder for querying News rows.
public static Model.Finder<String,News> find = new Model.Finder<String,News>(
String.class, News.class
);
}
|
20d06107e8bf65b13dc1f41af21af268feb1bfbe
|
[
"Markdown",
"SQL",
"Java",
"HTML"
] | 4 |
SQL
|
Setrag/nsih
|
8885fddb9640de360e65930dc153c8e9aa8f1c67
|
88b5ba306afa08ea29ce9c348b16ef1005dd4593
|
refs/heads/main
|
<repo_name>Josessantos5034/Digitalizacion_de_procesos_en_la_toma_de_datos_en_cerdos-<file_sep>/node js/index.js
const TelegramBot = require('node-telegram-bot-api');
//var mqtt = require('mqtt');/
var Constantes = require('./Token');

// Fix: Token.js exports an object ({ token: ... }), so the token must be read
// as Constantes.token. The original "new TelegramBot(Constantes,token, ...)"
// passed the whole module object plus an undefined global to the constructor.
const bot = new TelegramBot(Constantes.token, {
    polling: true
});

var Idmichat = 923905754;

// The MQTT broker URL (commented out below) embeds the access credentials; it
// is generated after the token is created.
//var client = mqtt.connect('mqtt://cerdos-prueba:pEcontDiDMEv<EMAIL>');/
// The MQTT client is currently disabled (see the commented lines above), so
// keep a null placeholder and guard every publish: previously the handler
// crashed with "client is not defined" on each command.
// TODO(review): re-enable the mqtt connection to actually publish.
var client = null;

var SerialPort = require('serialport');
var MiPuerto = new SerialPort('COM4');

bot.on('message', (msg) => {
    const chatId = msg.chat.id;
    console.log("El ID del chat" + chatId);
    // Non-text updates (photos, stickers, ...) have no msg.text; skip them
    // instead of crashing on toString().
    if (!msg.text) {
        return;
    }
    var Mensaje = msg.text.toString().toLocaleLowerCase();
    if (Mensaje.indexOf("encender") >= 0) {
        console.log("encendiendo el led");
        bot.sendMessage(chatId, 'ON');
        if (client) {
            client.publish('temp', '1');
        }
    } else if (Mensaje.indexOf("apagar") >= 0) {
        console.log("apagando el led");
        bot.sendMessage(chatId, 'OFF');
        if (client) {
            client.publish('temp', '0');
        }
    }
});
<file_sep>/node js/Token.js
// Telegram bot credentials, kept in a separate module so index.js can
// require('./Token') without hard-coding the secret.
module.exports = {
    token : '<KEY>',   // bot token issued by BotFather
};
<file_sep>/README.md
# Digitalizacion de toma de datos en cria de cerdos
INTRODUCCION
La porcicultura es un gran rubro que se encuentra presente en todos los lugares de mundo, ya que a partir ello los mercados locales e internacionales puede ser surtidos. En algunos casos este sector es descuidado o no cuenta con las herramientas tecnológicas necesarias para poder trabajar y optimizar procesos importantes durante el periodo de cría de estos animales.
En la presente investigación se recopila toda la información del proyecto “Digitalización en los procesos de toma de datos para la crianza de cerdos de granja”. En el cual se hace uso de diversas áreas, de tal manera poder desarrollar un proceso de recolección de datos y su almacenamiento en formato digital.
Dentro de la investigación se tiene diferentes temas como; microcontroladores, celdas de carga, módulos de cámara, drivers y entre muchos más que serán mencionado en el desarrollo del trabajo, para dar un conocimiento básico, así como entendible para usuarios finales y personas capacitadas en el área.
El principal motivo de esta investigación es la digitalización de una parte de los procesos que se realizan en el área de la porcicultura, para así tener datos precisos, registros concretos y así prevenir cualquier situación a futuro que pueda afectar al ganado.
OBJETIVOS PLANTEADOS:
Objetivo general
• Diseñar una propuesta para la digitalización en los procesos de toma de datos para la crianza de cerdos en granjas.
Objetivos específicos
• Registrar el proceso de investigación y desarrollo de la digitalización en los procesos de toma de datos para la crianza de cerdos en granjas.
• Emplear herramientas tecnológicas para facilitar algunos procesos dentro de la crianza de cerdos.
• Diseñar un manual para los usuarios, este funcionara de guía para usar correctamente el proyecto.
ALCANCES DEL PROYECTO:
1. Registrar el proceso de investigación y desarrollo de la digitalización en los procesos de toma de datos para la crianza de cerdos en granjas.
2. Diseñar un manual para los usuarios, este funcionara de guía para usar correctamente el proyecto.
3. Emplear herramientas tecnológicas para facilitar algunos procesos dentro de la crianza de cerdos.
COMPONENTES PARA ELABORAR EL PROTOTIPO:
- Celda de carga de 500kg

- Amplificador HX711

- ESP32 wifi

- Modulo de camara ESP32

- Sensor de temperatura MLX90614

- LCD 16x2 I2C

***
LENGUAJES DE PROGRAMACION Y PLATAFORMAS UTILIZADAS:
- ARDUINO
- JAVASCRIPT
- NODE JS
- BOOT TELEGRAM
***
### DISEÑO DE LA ESTRUCTURA DE LA BASCULA DIGITAL:

<file_sep>/codigos/CHATBOT_BALANZA_TEMPERATURA/CHATBOT_BALANZA_TEMPERATURA.ino
//https://randomnerdtutorials.com/telegram-request-esp32-esp8266-nodemcu-sensor-readings/
//https://blog.330ohms.com/2021/03/09/crea-tu-propio-bot-de-telegram-con-esp32/
//https://github.com/witnessmenow/Universal-Arduino-Telegram-Bot
//https://randomnerdtutorials.com/telegram-control-esp32-esp8266-nodemcu-outputs/
//
#include "HX711.h" //You must have this library in your arduino library folder
#define LOADCELL_DOUT_PIN 22
#define LOADCELL_SCK_PIN 23
float calibration_factor = 4034.09091;
HX711 scale;
#ifdef ESP32
#include <WiFi.h>
#else
#include <ESP8266WiFi.h>
#endif
#include <WiFiClientSecure.h>
#include <UniversalTelegramBot.h> // Universal Telegram Bot Library written by <NAME>: https://github.com/witnessmenow/Universal-Arduino-Telegram-Bot
#include <ArduinoJson.h>
// Required by the global `Adafruit_BME280 bme;` below; previously missing,
// which broke the build.
#include <Adafruit_Sensor.h>
#include <Adafruit_BME280.h>
// Replace with your network credentials
const char ssid[] = "RED-111";
const char pass[] = "<PASSWORD>";
const char ssid2[] = "TPS";
const char pass2[] = "<PASSWORD>";
// Use @myidbot to find out the chat ID of an individual or a group
// Also note that you need to click "start" on a bot before it can
// message you
#define CHAT_ID "923905754"
// Initialize Telegram BOT
#define BOTtoken "<KEY>" // your Bot Token (Get from Botfather)
#ifdef ESP8266
X509List cert(TELEGRAM_CERTIFICATE_ROOT);
#endif
WiFiClientSecure client;
UniversalTelegramBot bot(BOTtoken, client);
//Checks for new messages every 1 second.
int botRequestDelay = 1000;
unsigned long lastTimeBotRan;
// BME280 connect to ESP32 I2C (GPIO 21 = SDA, GPIO 22 = SCL)
// BME280 connect to ESP8266 I2C (GPIO 4 = SDA, GPIO 5 = SCL)
Adafruit_BME280 bme;
// Get BME280 sensor readings and return them as a String variable
// Reads temperature and humidity from the global BME280 sensor and formats
// them as a two-line, human-readable message for Telegram replies.
String getReadings() {
  float temperature, humidity;
  temperature = bme.readTemperature();
  humidity = bme.readHumidity();
  String message = "Temperature: " + String(temperature) + " ºC \n";
  message += "Humidity: " + String (humidity) + " % \n";
  return message;
}
// Processes a batch of incoming Telegram messages: rejects any chat other
// than CHAT_ID, answers /start with usage help and /readings with current
// BME280 sensor data.
void handleNewMessages(int numNewMessages) {
  Serial.println("handleNewMessages");
  Serial.println(String(numNewMessages));

  for (int i = 0; i < numNewMessages; i++) {
    // Chat id of the requester
    String chat_id = String(bot.messages[i].chat_id);
    // Only the single hard-coded chat is authorized to use the bot.
    if (chat_id != CHAT_ID) {
      bot.sendMessage(chat_id, "Unauthorized user", "");
      continue;
    }

    // Print the received message
    String text = bot.messages[i].text;
    Serial.println(text);

    String from_name = bot.messages[i].from_name;

    if (text == "/start") {
      String welcome = "Welcome, " + from_name + ".\n";
      welcome += "Use the following command to get current readings.\n\n";
      welcome += "/readings \n";
      bot.sendMessage(chat_id, welcome, "");
    }

    if (text == "/readings") {
      String readings = getReadings();
      bot.sendMessage(chat_id, readings, "");
    }
  }
}
void setup() {
Serial.begin(9600);
Serial.println("HX711 Calibration");
Serial.println("Remove all weight from scale");
Serial.println("After readings begin, place known weight on scale");
Serial.println("Press a,s,d,f to increase calibration factor by 10,100,1000,10000 respectively");
Serial.println("Press z,x,c,v to decrease calibration factor by 10,100,1000,10000 respectively");
Serial.println("Press t for tare");
scale.begin(LOADCELL_DOUT_PIN, LOADCELL_SCK_PIN);
scale.set_scale();
scale.tare(); //Reset the scale to 0
long zero_factor = scale.read_average(); //Get a baseline reading
Serial.print("Zero factor: "); //This can be used to remove the need to tare the scale. Useful in permanent scale projects.
Serial.println(zero_factor);
#ifdef ESP8266
configTime(0, 0, "pool.ntp.org"); // get UTC time via NTP
client.setTrustAnchors(&cert); // Add root certificate for api.telegram.org
#endif
// Init BME280 sensor
if (!bme.begin(0x76)) {
Serial.println("Could not find a valid BME280 sensor, check wiring!");
while (1);
}
// Connect to Wi-Fi
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
#ifdef ESP32
client.setCACert(TELEGRAM_CERTIFICATE_ROOT); // Add root certificate for api.telegram.org
#endif
while (WiFi.status() != WL_CONNECTED) {
delay(1000);
Serial.println("Connecting to WiFi..");
}
// Print ESP32 Local IP Address
Serial.println(WiFi.localIP());
}
// Main loop: poll Telegram for new messages at most once per botRequestDelay
// milliseconds, print a scale reading, and handle calibration keystrokes
// arriving over serial.
void loop() {
  // Rate-limited Telegram polling; drains all pending updates each pass.
  if (millis() > lastTimeBotRan + botRequestDelay) {
    int numNewMessages = bot.getUpdates(bot.last_message_received + 1);
    while (numNewMessages) {
      Serial.println("got response");
      handleNewMessages(numNewMessages);
      numNewMessages = bot.getUpdates(bot.last_message_received + 1);
    }
    lastTimeBotRan = millis();
  }

  scale.set_scale(calibration_factor); //Adjust to this calibration factor

  Serial.print("Reading: ");
  Serial.print(scale.get_units(), 1);
  Serial.print(" lb"); //Change this to kg and re-adjust the calibration factor if you follow SI units like a sane person
  Serial.println();

  // Interactive calibration over serial: a/s/d/f raise the factor by
  // 10/100/1000/10000, z/x/c/v lower it by the same steps, 't' tares.
  if (Serial.available())
  {
    char temp = Serial.read();
    if (temp == '+' || temp == 'a')
      calibration_factor += 10;
    else if (temp == '-' || temp == 'z')
      calibration_factor -= 10;
    else if (temp == 's')
      calibration_factor += 100;
    else if (temp == 'x')
      calibration_factor -= 100;
    else if (temp == 'd')
      calibration_factor += 1000;
    else if (temp == 'c')
      calibration_factor -= 1000;
    else if (temp == 'f')
      calibration_factor += 10000;
    else if (temp == 'v')
      calibration_factor -= 10000;
    else if (temp == 't')
      scale.tare(); //Reset the scale to zero
  }
}
|
3e25f11c3473332f517fcf5d63caa56929cde04a
|
[
"JavaScript",
"C++",
"Markdown"
] | 4 |
JavaScript
|
Josessantos5034/Digitalizacion_de_procesos_en_la_toma_de_datos_en_cerdos-
|
c18c62d7d829e1de8f99c9842ee183ae0475d9f3
|
d2f564ff58b2475a1ae5428cd5bfa24f05348a34
|
refs/heads/master
|
<file_sep>import { Component, Input } from '@angular/core';
/**
 * List item showing a single pet: name, description and a placeholder photo.
 */
@Component({
  selector: 'app-pet-list-item',
  templateUrl: './pet-list-item.component.html'
})
export class PetListItemComponent {
  // Pet name supplied by the parent list.
  @Input() name;
  // Short description; defaults to placeholder text (Portuguese).
  @Input() description = 'Um animal dócil que foi perdido no centreventos';
  // Hard-coded placeholder photo URL used for every pet.
  // tslint:disable-next-line:max-line-length
  photo = 'https://thenypost.files.wordpress.com/2018/05/180516-woman-mauled-by-angry-wiener-dogs-feature.jpg?quality=90&strip=all&w=200&h=200&crop=1';
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { PetOptions } from 'src/app/shared/pet-list-item/pet-list-item.options';
/**
 * Home page: exposes two hard-coded pet lists (lost pets and pets up for
 * adoption) for the template to render.
 */
@Component({
  selector: 'app-home',
  templateUrl: './home.component.html',
  styleUrls: ['./home.component.scss']
})
export class HomeComponent implements OnInit {

  // Pets reported as lost (static sample data).
  lostPets: PetOptions[] = [{
    name: 'Roberto',
    description: 'Descrição do pet'
  }, {
    name: 'Robertoso',
    description: 'Descrição do pet'
  }, {
    name: 'Adolfo',
    description: 'Descrição do pet'
  }, {
    name: 'Rogério',
    description: 'Descrição do pet'
  }];

  // Pets available for adoption (static sample data).
  adoptionPets: PetOptions[] = [{
    name: 'Romário',
    description: 'Descrição do pet'
  }, {
    name: 'Adão',
    description: 'Descrição do pet'
  }];

  constructor() { }

  // No initialisation needed yet; present to satisfy OnInit.
  ngOnInit() {
  }
}
<file_sep>import { Component } from '@angular/core';
import { PetOptions } from './shared/pet-list-item/pet-list-item.options';
/**
 * Root component of the application; renders the top-level shell template.
 */
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss']
})
export class AppComponent {
  // Application title (unused beyond the default CLI scaffold).
  title = 'academia-dev-frontend';
}
|
882d741cc78c28a60fefd66420b8b27d873d29f2
|
[
"TypeScript"
] | 3 |
TypeScript
|
academiadev-jlle/frontend-angular-deploy-victormafra
|
9e3ef69e088fdf7424d23e6bd5852c10b0453467
|
bef578968bf1576186eab09565e0bd35e56d2220
|
refs/heads/master
|
<repo_name>morinatsu/dotfiles<file_sep>/fuc/git.md
Git
===
```bash
# list Authors
$ git shortlog --summary --email
# remove untracked files from the working tree
$ git clean -d --dry-run
$ git clean -d --force
# find commits whose diffs add or remove <string> (git "pickaxe" search)
$ git log -S <string>
$ git log --pretty=oneline --name-only -S <string>
```
<file_sep>/bash_aliases
#bash_aliases
# Personal aliases, sourced from ~/.bashrc.

# less.sh: use Vim's "less" macro as a pager when available (path differs
# between a locally-built and a distro-packaged Vim 7.4).
if [ -f /usr/local/share/vim/vim74/macros/less.sh ]; then
    alias less.sh='/usr/local/share/vim/vim74/macros/less.sh'
fi
if [ -f /usr/share/vim/vim74/macros/less.sh ]; then
    alias less.sh='/usr/share/vim/vim74/macros/less.sh'
fi

# ctags: write the tags file as hidden ".tags" instead of "tags".
alias ctags='ctags -f .tags'
<file_sep>/Makefile
ifeq ($(OS),Windows_NT)
VIMDIR := vimfiles
VIMRC := vimrc
GVIMRC := gvimrc
UNDODIR := .local/undodir
else
VIMDIR := .vim
VIMRC := vimrc
GVIMRC := gvimrc
UNDODIR := .local/undodir
endif
install: install-bash install-vim install-powerline install-byobu install-textlint install-tmux
install-bash:
rm -f ~/.bashrc
ln -s `pwd`/bashrc ~/.bashrc
rm -f ~/.bash_profile
ln -s `pwd`/bash_profile ~/.bash_profile
rm -f ~/.bash_aliases
ln -s `pwd`/bash_aliases ~/.bash_aliases
install-byobu:
rm -rf ~/.byobu
ln -s `pwd`/byobu ~/.byobu
rm -rf ~/.fuc
ln -s `pwd`/fuc ~/.fuc
# Symlink the powerline config into ~/.config, creating ~/.config first if
# needed. BUG FIX: mkdir has no -f flag; -p creates the directory (and any
# parents) idempotently.
install-powerline:
	@if [ ! -d ~/.config ]; then \
		mkdir -p ~/.config; \
	fi
	rm -f ~/.config/powerline
	ln -s `pwd`/powerline ~/.config/powerline
install-vim:
rm -rf ~/$(VIMDIR)
ln -s `pwd`/vim ~/$(VIMDIR)
curl https://raw.githubusercontent.com/Shougo/dein.vim/master/bin/installer.sh > ./installer.sh
sh ./installer.sh `pwd`/vim/dein
rm -f ./installer.sh
rm -rf ~/$(UNDODIR)
mkdir -p ~/$(UNDODIR)
install-textlint:
rm -f ~/.textlintrc
ln -s `pwd`/textlintrc ~/.textlintrc
install-tmux:
rm -rf ~/.tmux
rm -f ~/.tmux.conf
ln -s `pwd`/tmux ~/.tmux
ln -s `pwd`/tmux/tmux.conf ~/.tmux.conf
<file_sep>/bash_profile
#########################################################################
# .bash_profile: Personal initialisation file for bash #
#########################################################################
# This script file is executed by bash(1) for login shells. By default,
# it does nothing, as ~/.bashrc is already sourced by /etc/profile.
#
# [JNZ] Modified 23-Sep-2004
#
# Written by <NAME> and released into the public domain.
# local binary directories
export PATH=~/.local/bin:$PATH
export PATH=$PATH:/usr/local/go/bin
export PATH="$PATH:$HOME/.rvm/bin"
# init pyenv
if [ -d "$HOME/.pyenv/" ]; then
export PYENV_ROOT="$HOME/.pyenv"
command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
eval "$(pyenv init -)"
fi
# init anyenv
export PATH="$HOME/.anyenv/bin:$PATH"
if [ -d "$HOME/.anyenv" ]; then
eval "$(anyenv init -)"
fi
test -r ~/.bashrc && . ~/.bashrc
[[ -s "$HOME/.rvm/scripts/rvm" ]] && source "$HOME/.rvm/scripts/rvm" # Load RVM into a shell session *as a function*
# --- for tiny-care-terminal ---
# List of accounts to read the last tweet from, comma separated
# The first in the list is read by the party parrot.
export TTC_BOTS='tinycarebot,selfcare_bot,magicrealismbot'
# Use this to have a different animal say a message in the big box.
export TTC_SAY_BOX="cat"
# List of folders to look into for `git` commits, comma separated.
export TTC_REPOS='~/dotfiles,~/projects'
# The max directory-depth to look for git repositories in
# the directories defined with `TTC_REPOS`. Note that the deeper
# the directory depth, the slower the results will be fetched.
export TTC_REPOS_DEPTH=2
# Location/zip code to check the weather for. Both 90210 and "San Francisco, CA"
# _should_ be ok (the zip code doesn't always work -- use a location
# first, if you can). It's using weather.service.msn.com behind the curtains.
export TTC_WEATHER='Tokyo'
# Set to false if you're an imperial savage. <3
export TTC_CELSIUS=true
# Unset this if you _don't_ want to use Twitter keys and want to
# use web scraping instead.
export TTC_APIKEYS=false
#
# Refresh the dashboard every 20 minutes.
export TTC_UPDATE_INTERVAL=20
# Twitter api keys
export TTC_CONSUMER_KEY='...'
export TTC_CONSUMER_SECRET='...'
export TTC_ACCESS_TOKEN='...'
export TTC_ACCESS_TOKEN_SECRET='...'
. "$HOME/.cargo/env"
<file_sep>/bashrc
# ~/.bashrc: executed by bash(1) for non-login shells.
# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
# for examples
# If not running interactively, don't do anything
[ -z "$PS1" ] && return
# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
HISTCONTROL=ignoreboth
# append to the history file, don't overwrite it
shopt -s histappend
export PROMPT_COMMAND="history -a; history -n"
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000
HISTFILESIZE=2000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# If set, the pattern "**" used in a pathname expansion context will
# match all files and zero or more directories and subdirectories.
#shopt -s globstar
# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
xterm-color) color_prompt=yes;;
esac
# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=yes
if [ -n "$force_color_prompt" ]; then
if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
# We have color support; assume it's compliant with Ecma-48
# (ISO/IEC-6429). (Lack of such support is extremely rare, and such
# a case would tend to support setf rather than setaf.)
color_prompt=yes
else
color_prompt=
fi
fi
if [ "$color_prompt" = yes ]; then
PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
else
PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
unset color_prompt force_color_prompt
# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
;;
*)
;;
esac
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
#alias dir='dir --color=auto'
#alias vdir='vdir --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
alias bagend='ssh <EMAIL>'
# Add an "alert" alias for long running commands. Use like so:
# sleep 10; alert
alias alert='notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$(history|tail -n1|sed -e '\''s/^\s*[0-9]\+\s*//;s/[;&|]\s*alert$//'\'')"'
# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.
if [ -f ~/.bash_aliases ]; then
. ~/.bash_aliases
fi
# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
. /etc/bash_completion
fi
# setup pyenv
export PYENV_ROOT="${HOME}/.pyenv"
if [ -d "${PYENV_ROOT}" ]; then
export PATH=${PYENV_ROOT}/bin:$PATH
eval "$(pyenv init -)"
fi
# virtualenvwrapper
export VIRTUALENV_USE_DISTRIBUTE=true
if [ -f /usr/local/bin/virtualenvwrapper.sh ]; then
export WORKON_HOME=$HOME/.virtualenvs
source /usr/local/bin/virtualenvwrapper.sh
fi
# google app engine
if [ -d ~/google_appengine ]; then
PATH=$PATH:~/google_appengine
export PATH
fi
# remake vimrc
if [ -f ~/.vimrc ]; then
rm ~/.vimrc
ln -s ~/dotfiles/vim/vimrc ~/.vimrc
fi
# pythonz
[[ -s $HOME/.pythonz/etc/bashrc ]] && source $HOME/.pythonz/etc/bashrc
# set tmux to byobu-backend
export BYOBU_BACKEND=tmux
# powerline
export PATH=$PATH:~/.local/bin
if [ -d "/usr/local/lib/python2.7/dist-packages/powerline/" ]; then
POWERLINE_ROOT="/usr/local/lib/python2.7/dist-packages/powerline/"
fi
if [ -d "/usr/local/lib/python3.5/dist-packages/powerline/" ]; then
POWERLINE_ROOT="/usr/local/lib/python3.5/dist-packages/powerline/"
fi
if [ -d "$HOME/.local/lib/python3.5/site-packages/powerline/" ]; then
POWERLINE_ROOT="$HOME/.local/lib/python3.5/site-packages/powerline/"
fi
if [ -d "$POWERLINE_ROOT" ]; then
# if [ -e $HOME/.local/bin/powerline-daemon ]; then
# $HOME/.local/bin/powerline-daemon -q
# else
# if [ -e /usr/local/bin/powerline-daemon ]; then
# /usr/local/bin/powerline-daemon -q
# fi
# fi
POWERLINE_BASH_CONTINUATION=1
POWERLINE_BASH_SELECT=1
. $POWERLINE_ROOT/bindings/bash/powerline.sh
fi
# for get-git-ignore
function _peco_ggi_list () {
lang=$(get-git-ignore | peco)
if [ -n "$lang" ]; then
get-git-ignore --lang=$lang
fi
}
alias ggi=_peco_ggi_list
function peco-hist() {
time_column=`echo $HISTTIMEFORMAT | awk '{printf("%s",NF)}'`
column=`expr $time_column + 3`
cmd=`history | tac | peco | sed -e 's/^ //' | sed -e 's/ +/ /g' | cut -d " " -f $column-`
history -s "$cmd"
eval $cmd
}
if ls --color=auto --show-control-char >/dev/null 2>&1;then
alias ls='ls --color=auto --show-control-char'
alias la='ls -A --color=auto --show-control-char'
else
alias ls='ls --color=auto'
alias la='ls -A --color=auto'
fi
if [[ "$OSTYPE" =~ cygwin ]];then
export CYGWIN="winsymlinks $CYGWIN"
fi
if [ -d "$HOME/node_modules/.bin" ]; then
export PATH="$HOME/node_modules/.bin:$PATH"
fi
if [ -d "$HOME/.local/go" ]; then
export GOPATH="$HOME/.local/go"
export PATH="$HOME/.local/go/bin:$PATH"
fi
if [ -f "$HOME/.peco-anyenv/peco-anyenv.sh" ]; then
source "$HOME/.peco-anyenv/peco-anyenv.sh"
fi
# $PATH operate
path_append () { path_remove $1; export PATH="$PATH:$1"; }
path_prepend () { path_remove $1; export PATH="$1:$PATH"; }
path_remove () { export PATH=`echo -n $PATH | awk -v RS=: -v ORS=: '$0 != "'$1'"' | sed 's/:$//'`; }
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
. "$HOME/.cargo/env"
# pyenv
if [ -d "$HOME/.pyenv" ]; then
export PYENV_ROOT="$HOME/.pyenv"
command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
eval "$(pyenv init -)"
fi
<file_sep>/fuc/misc.md
MISC
====
```bash
# log directory
$ pushd /var/log
```
```bash
# arp-scan
$ arp-scan -I wlp1s0 -l
```
```bash
# purge old linux-image
$ dpkg --get-selections | grep linux-image
$ sudo aptitude purge linux-image-?.?.?-??-generic linux-image-extra-?.?.?-??-generic
```
```bash
# ansible
$ ansible-playbook playbook.yml -i ./hosts -C --ask-vault-pass
$ ansible-playbook playbook.yml -i ./hosts -C --ask-become-pass
```
```vim
# vim for debug
$ vim -N -u ~/projects/min-vimrc/vimrc -U NONE -i NONE --noplugin
```
<file_sep>/fuc/README.md
# my fuc
My frequently used commands
|
ea8b62276674e0718bdaf46662fbb029ac6bd2a6
|
[
"Markdown",
"Makefile",
"Shell"
] | 7 |
Markdown
|
morinatsu/dotfiles
|
1b8543ced1f456fd05e6b63940ad958bc869956b
|
1ffde01dbf939f8b74845a87c07118187fc152dc
|
refs/heads/master
|
<file_sep>const RESOURCE_CACHE = 'resources-v1';
var urlsToCache = [
'/',
'/player.css',
'/player.js'
];
const TRACK_CACHE = 'tracks-v1';
const IMG_CACHE = 'images-v1';
const POLL_CACHE = 'polls-v1';
var waitingPolls = {};
// To prevent the same thing being downloaded multiple times
var fetchingTracks = {};
var fetchingImages = {};
var erroringTracks = {};
var registration = self.registration;
self.addEventListener('install', function swInstalled(event) {
event.waitUntil(refreshResources());
});
self.addEventListener('fetch', function respondToFetch(event) {
var url = new URL(event.request.url);
if (event.request.method == "GET") {
// HACK: chrome doesn't support ignoreSearch yet http://crbug.com/520784
var fakeurl = new URL(event.request.url);
fakeurl.search = '';
var fakerequest = new Request(fakeurl.href);
var responsePromise = caches.match(fakerequest).then(function serveFromCache(response) {
if (response) {
if (event.request.url.startsWith(dataOrigin+"poll")) {
return response.clone().json().then(function pollFromCache (data) {
return new Promise(function pollPromiser (resolve) {
if (data.hashcode != url.searchParams.get("hashcode")) {
resolve(response);
} else {
if (!waitingPolls[url.pathname]) waitingPolls[url.pathname] = [];
waitingPolls[url.pathname].push(resolve);
}
});
});
}
return response;
}
return fetch(event.request);
}).then(function postResponseHandler(response) {
if (url.pathname == "/poll/summary") {
return response.clone().json()
.then(function (data) {
if (data.tracks) data.tracks.forEach(function (track) {
var trackUrl = new URL(track.url);
if (tracksCached.isCached(trackUrl.href)) track.cached = true;
if (trackUrl.href in fetchingTracks) track.caching = true;
if (trackUrl.href in erroringTracks) track.erroring = erroringTracks[trackUrl.href];
});
return new Response(new Blob([JSON.stringify(data)]));
});
}
return response;
});
event.respondWith(responsePromise);
// When the main page is requested, try updating the resources from the network asynchronously.
if (url.pathname == "/") {
responsePromise.then(refreshResources);
}
} else if (event.request.method == "POST") {
var postPromise;
switch (url.pathname) {
case "/done":
postPromise = modifySummary.trackDone(url.searchParams.get("track"));
break;
case "/play":
postPromise = modifySummary.play();
break;
case "/pause":
postPromise = modifySummary.pause();
break;
case "/update":
postPromise = modifySummary.update(url.searchParams.get("update_url"),url.searchParams.get("update_time"));
break;
default:
postPromise = new Promise(function (resolve) {resolve()});
}
var responsePromise = postPromise.catch(function (error) {
console.warn("Cached summary not modified", error.message);
}).then(function () {
return registration.sync.register(event.request.url);
}).then(function (){
return new Response(new Blob(), {status: 202, statusText: "Accepted by Service Worker"});
});
event.respondWith(responsePromise);
}
});
// Background Sync handler: sync tags that are full URLs represent POST
// requests queued while offline (see the fetch handler); replay them once
// connectivity returns.
self.addEventListener('sync', function backgroundSync(event) {
	if (event.tag.startsWith("https://")) {
		event.waitUntil(fetch(event.tag, {method: 'POST'}));
	}
});
// Load resources for tracks into caches so they're quick to load
function preLoadTrack(trackData) {
if (!trackData || !trackData.url) return;
// Attempt to load the track itself into the track cache
caches.open(TRACK_CACHE).then(function preFetchTrack(cache) {
var trackRequest = new Request(trackData.url);
cache.match(trackRequest).then(function (fromCache) {
if (fromCache || trackRequest.url in fetchingTracks) return;
fetchingTracks[trackRequest.url] = fetch(trackRequest).then(function cacheTrack(trackResponse) {
if (trackResponse.status == 200) {
return cache.put(trackRequest, trackResponse).catch(function (error) {
console.error("Failed to cache track:", error.message);
erroringTracks[trackRequest.url] = error.message;
}).then(function () {
delete fetchingTracks[trackRequest.url];
});
} else {
throw "non-200 response";
}
// If the track wasn't reachable, tell the server, which should skip that one out
}).catch(function trackError(error) {
delete fetchingTracks[trackRequest.url];
modifySummary.trackDone(trackRequest.url);
registration.sync.register(dataOrigin+"done?track="+encodeURIComponent(trackData.url)+"&status=serviceWorkerFailedLookup");
erroringTracks[trackRequest.url] = error.message;
}).then(tracksCached.refresh);
});
});
// Add the track's image into the image cache.
caches.open(IMG_CACHE).then(function preFetchImage(cache) {
var imgRequest = new Request(trackData.metadata.img, {mode: 'no-cors'});
cache.match(imgRequest).then(function (fromCache) {
if (fromCache || imgRequest.url in fetchingImages) return;
// Ideally just do cache.add(imgRequest), but that has poor error handling
fetchingImages[imgRequest.url] = fetch(imgRequest).then(function (response) {
cache.put(imgRequest, response).catch(function (error) {
console.error("Failed to cache image:", error.message);
}).then(function () {
delete fetchingImages[imgRequest.url];
});
});
})
});
}
function poll(url, handleDataFunction, additionalParamFunction, cache) {
if (!url) throw "no URL given to poll";
if (handleDataFunction && typeof handleDataFunction != 'function') throw "handleDataFunction must be a function";
if (additionalParamFunction && typeof additionalParamFunction != 'function') throw "additionalParamFunction must be a function";
function actuallyPoll(hashcode) {
var params = "?";
params += "hashcode="+hashcode;
params += "&_cb="+new Date().getTime();
if (additionalParamFunction) params += additionalParamFunction();
var response;
fetch(url+params).then(function decodePoll(response) {
return response.clone().json().then(function handlePoll(data) {
// Create a request object which ignores all the params to cache against
var baseurl = new URL(url);
var request = new Request(baseurl);
// If there's a hashcode, use the new one and evaluate new data.
if (data.hashcode) {
hashcode = data.hashcode;
if (cache) cache.put(request, response.clone()).catch(function (error) {
cache.delete(request).then(function () {
console.error("Failed to cache poll. Deleted stale copy from cache.", error.message);
}).catch(function (error) {
console.error("Can't replace or delete cached poll. Data Stuck.", error.message);
});
});
if (handleDataFunction) handleDataFunction(data);
statusChanged(baseurl.pathname, response);
}
actuallyPoll(hashcode);
});
}).catch(function pollError(error){
// Wait 5 second before trying again to prevent making things worse
setTimeout(function pollRetry() {
actuallyPoll(hashcode);
}, 5000);
});
}
actuallyPoll(null);
}
var modifySummary = (function () {
var summaryRequest = new Request(dataOrigin+'poll/summary');
var cachePromise = caches.open(POLL_CACHE);
// Get the summary response from the cache and JSON decode it
function getCachedSummary() {
return caches.match(summaryRequest).then(function decodeCachedSummary(response) {
if (!response) throw "No summary in cache";
return response.json();
});
}
function putCachedSummary(newData) {
var json = JSON.stringify(newData)
var summaryResponse = new Response(new Blob([json]));
return cachePromise.then(function (cache) {
return cache.put(summaryRequest, summaryResponse.clone())
.catch(function (error) {
cache.delete(summaryRequest).then(function () {
console.error("Failed to cache changes. Deleted poll from cache.", error.message);
}).catch(function (error) {
console.error("Can't alter or delete cached poll. Data Stuck.", error.message);
});
});
}).catch(function (error) {
console.error("Can't access poll cache", error.message);
}).then(function () {
statusChanged('/poll/summary', summaryResponse.clone());
});
}
// Removes the done track from the cached summary poll
function trackDone(url) {
return getCachedSummary().then(function handleCachedSummary(data) {
// Keep all tracks which aren't the done one
data.tracks = data.tracks.filter(function (track) {
return (track.url != url);
});
return putCachedSummary(data);
});
}
function play() {
return getCachedSummary().then(function handleCachedSummary(data) {
data.isPlaying = true;
return putCachedSummary(data);
});
}
function pause() {
return getCachedSummary().then(function handleCachedSummary(data) {
data.isPlaying = false;
return putCachedSummary(data);
});
}
function update(url, time) {
return getCachedSummary().then(function handleCachedSummary(data) {
// Update the time for the track specified
data.tracks.forEach(function (track) {
if (track.url == url) {
track.currentTime = time;
}
});
return putCachedSummary(data);
});
}
return {
trackDone: trackDone,
play: play,
pause: pause,
update: update,
}
})();
// Wake every long-poll request parked on this path, handing each its own
// copy of the freshly-updated response.
function statusChanged(path, response) {
	const pending = waitingPolls[path];
	if (!pending) return;
	let resolver;
	while ((resolver = pending.shift())) {
		resolver(response);
	}
}
// Kick off caching for every track in the summary data, then evict cached
// tracks that are no longer in the upcoming set.
function preloadAllTracks(data) {
	if (!data.tracks) return;
	var tracks = {};
	data.tracks.forEach(function (track) {
		preLoadTrack(track);
		tracks[track.url] = true; // record upcoming URLs so tidyCache keeps them
	});
	tracksCached.tidyCache(tracks);
}
// Fetch the static app-shell resources and store them in the resource cache.
// Errors are logged rather than rethrown, so the returned promise never
// rejects (keeps install/refresh from failing outright).
async function refreshResources() {
	try {
		const cache = await caches.open(RESOURCE_CACHE);
		await cache.addAll(urlsToCache);
	} catch (error) {
		console.error("Failed to cache resources:", error.message);
	}
}
// Re-deliver the cached poll response for `url` to any requests currently
// waiting on it (used after the set of cached tracks changes, so waiting
// clients re-render with up-to-date flags).
function forceResolvePoll(url) {
	var url = new URL(url); // shadows the string parameter with a parsed URL
	var request = new Request(url);
	caches.match(request).then(function respond(response) {
		if (response) {
			statusChanged(url.pathname, response);
		} else {
			console.warn("No poll in cache, can't force resolve")
		}
	});
}
// Synchronously check which tracks are cached
var tracksCached = (function () {
	// In-memory index of cached track URLs, rebuilt asynchronously by refresh().
	// (Fix: removed an unused `trackCache` promise that opened TRACK_CACHE and
	// discarded the result — dead code.)
	var tracks = {};

	// Synchronous membership check against the last refreshed snapshot.
	function isCached(trackURL) {
		return trackURL in tracks;
	}

	// Deletes all tracks from the cache whose URL isn't in the object upcomingtracks
	function tidyCache(upcomingtracks) {
		cacheIterate(function deleteTrack(trackurl, cache) {
			// Normalise the cached URL so it matches the keys used in upcomingtracks.
			var equivtrackurl = trackurl.replace("import/black/", "").replace(/%20/g, ' ');
			if (!(equivtrackurl in upcomingtracks)) cache.delete(new Request(trackurl));
		}).then(refresh);
	}

	// Apply trackFunction(url, cache) to every request stored in the track cache.
	function cacheIterate(trackFunction) {
		return caches.open(TRACK_CACHE).then(function (cache) {
			return cache.keys().then(function (requests) {
				return requests.map(function (request) {
					return trackFunction(request.url, cache);
				});
			});
		});
	}

	// Rebuild the index from the cache, then force the summary poll to re-resolve
	// so clients learn about the new cached state.
	function refresh() {
		var tracksNowInCache = {};
		cacheIterate(function setIsCached(trackurl) {
			tracksNowInCache[trackurl] = true;
		}).then(function resolve() {
			tracks = tracksNowInCache;
			forceResolvePoll(dataOrigin+"poll/summary");
		});
	}
	refresh();
	return {
		isCached: isCached,
		refresh: refresh,
		tidyCache: tidyCache,
	};
})();
// Kick off the summary long-poll once the poll cache is open.
// Fix: the original chained .catch() BEFORE .then(), so an open failure still
// invoked the then-handler with `cache === undefined`; the catch now comes last
// and also covers errors thrown while starting the poll.
caches.open(POLL_CACHE).then(function (cache) {
	poll(dataOrigin+"poll/summary", preloadAllTracks, null, cache);
}).catch(function (error) {
	console.error('Failed to open caches', error);
});
<file_sep>version: '3'
services:
seinn:
build: .
container_name: seinn
ports:
- "3001:3001"
image: lucas42/lucos_seinn<file_sep>const express = require('express');
const fs = require('fs');
const app = express();
const port = process.env.PORT || 3000;
const dataOrigin = process.env.MEDIA_MANAGER || "https://ceol.l42.eu/";
const dataOriginLine = "const dataOrigin = \""+dataOrigin.replace(/"/g, "\\\"")+"\";\n";
// Handle javascript files specially and include data origin as first line
app.get('/:file.js', function(req, res, next){
	// req.path already carries the leading slash, so this resolves under ./public.
	fs.readFile(__dirname + '/public'+req.path, 'utf8', function(err, contents) {
		if (!contents) {
			// Missing (or empty) file: defer to the static middleware / 404 handling.
			next();
			return;
		}
		res.set('Content-Type', 'text/javascript');
		// Prepend the dataOrigin constant so client scripts know which API host to use.
		res.send(dataOriginLine+contents);
	});
});
app.use(express.static(__dirname + '/public', {extensions: ['json']}));
app.listen(port, function () {
console.log('App listening on port ' + port);
});<file_sep>FROM node:14
WORKDIR /usr/src/app
# Copy the whole repo and install runtime dependencies.
COPY . .
RUN npm install
ENV NODE_ENV production
# The express app reads PORT from the environment (defaults to 3000 otherwise).
ENV PORT 3001
EXPOSE $PORT
CMD [ "npm", "start" ]
|
eadc36f879b47e32505bccff92fb9d55fe57ec80
|
[
"JavaScript",
"YAML",
"Dockerfile"
] | 4 |
JavaScript
|
lucas42/lucos-seinn
|
977878b744a3234ea8e316ba471eed239d8e92bc
|
c68fd2f9be94647e7d04e6b00c8a85dc6edb6ae9
|
refs/heads/master
|
<repo_name>jerniceduncan/Team-Profile-Generator<file_sep>/lib/build_team.js
const Manager = require("./Manager");
const inquirer = require("inquirer");
const Intern = require("./Intern");
const Engineer = require('./Engineer');
const html = require('./htmlRenderer');
const fs = require('fs');
const genCard = require('./gen_cards');
const chalk = require('chalk');
let managerCard = "";
let engCard = "";
let internCard = "";
let team_cards = "";
//building a team using manager prompt//
// Entry point of the generator: gather the manager's details, render their
// card into the running roster, then hand off to the member-selection menu.
let managerPrompt = () => {
    console.log(chalk.red.bgYellowBright.underline.bold("Please Build Your Team!"));
    const questions = [
        { type: "input", name: "name", message: chalk.cyanBright("What is your Managers Name?") },
        { type: "input", name: "id", message: chalk.magentaBright("What is your Manager id?") },
        { type: "input", name: "email", message: chalk.redBright("What is your managers email?") },
        { type: "input", name: "officeNumber", message: chalk.blueBright("What is your managers office number?") }
    ];
    // Instantiate a Manager from the answers and append its rendered card.
    return inquirer.prompt(questions).then(({ name, id, email, officeNumber }) => {
        const newManager = new Manager(name, id, email, officeNumber);
        managerCard = genCard.man_card(newManager.name, newManager.id, newManager.email, newManager.officeNumber);
        team_cards += managerCard;
        chooseTeam();
    });
}
//prompt to choose a team member
// Menu loop: ask which kind of member to add next; on "done", write the
// accumulated cards out as the HTML roster.
const chooseTeam = () => {
    const question = {
        type: "list",
        name: "member",
        message: chalk.yellowBright("Which type of team member would you like to add?"),
        choices: ["engineer", "Intern", "I don't want to add any more team members"]
    };
    inquirer.prompt([question]).then(({ member }) => {
        if (member === "engineer") {
            buildEngineer();
        } else if (member === "Intern") {
            buildIntern();
        } else {
            console.log(chalk.magentaBright.bgWhiteBright.underline.bold("Check the output folder for your generated team!"));
            const outputPage = html.htmlRender(team_cards);
            fs.writeFile("./output/new_team.html", outputPage, (err) => {
                if (err) throw err;
            });
        }
    });
}
//prompt to build an intern
// Collect an intern's details, render their card, then return to the menu.
const buildIntern = () => {
    const questions = [
        { type: "input", name: "internName", message: chalk.cyanBright("What is your Interns Name?") },
        { type: "input", name: "internId", message: chalk.magentaBright("What is your Interns id?") },
        { type: "input", name: "internEmail", message: chalk.redBright("What is your Interns email?") },
        { type: "input", name: "internSchool", message: chalk.blueBright("Where did your Intern go to School?") }
    ];
    inquirer.prompt(questions).then((answers) => {
        const newIntern = new Intern(answers.internName, answers.internId, answers.internEmail, answers.internSchool);
        internCard = genCard.genInternCard(newIntern.name, newIntern.id, newIntern.email, newIntern.school);
        team_cards += internCard;
        chooseTeam();
    });
}
// Prompt to Build Engineer
// Collect an engineer's details, render their card, then return to the menu.
const buildEngineer = () => {
    const questions = [
        { type: "input", name: "engineerName", message: chalk.cyanBright("What is your Engineers Name?") },
        { type: "input", name: "engineerId", message: chalk.magentaBright("What is your Engineers id?") },
        { type: "input", name: "engineerEmail", message: chalk.redBright("What is your Engineers email?") },
        { type: "input", name: "engineerGitHub", message: chalk.blueBright("What is your Engineers Github userName?") }
    ];
    inquirer.prompt(questions).then((answers) => {
        const newEngineer = new Engineer(answers.engineerName, answers.engineerId, answers.engineerEmail, answers.engineerGitHub);
        engCard = genCard.genEngineerCard(newEngineer.name, newEngineer.id, newEngineer.email, newEngineer.github);
        team_cards += engCard;
        chooseTeam();
    });
}
// NOTE(review): because of the comma operator, only `managerPrompt` is actually
// exported here; `buildEngineer` and `buildIntern` are evaluated and discarded.
// Callers doing `require('./lib/build_team')()` still work, so this is left
// unchanged — if named exports are needed, switch to `module.exports = { ... }`.
module.exports =
    managerPrompt,
    buildEngineer,
    buildIntern;
# Team Profile Generator
[]()
---
## 🧐 About
A software engineering team generator command line application. The application will prompt the user for information about the team manager and then information about the team members. The user can input any number of team members, and they may be a mix of engineers and interns. This assignment must also pass all unit tests. When the user has completed building the team, the application will create an HTML file that displays a nicely formatted team roster based on the information provided by the user.
[]()
## Demo
[demo gif](https://github.com/jerniceduncan/Team-Profile-Generator/blob/master/Assets/TPG.gif)
---
## 🏁 Getting Started
- Clone Repo to your machine.
- Navigate to the cloned folder.
- Open Bash command line in the file.
### Prerequisites
Things you need to install before running the application:
- Google Chrome
- Git Bash
- Node.js
### Installing
- npm install.
- node server.js
## ⛏️ Built Using
- [Express](https://expressjs.com/) - Server Framework
-
- [NodeJs](https://nodejs.org/en/) - Server Environment
## ✍️ Author
- [@jerniceduncan](https://github.com/jerniceduncan)
|
1db47c329a2798f8894cfbbba28a0f05b301ecbd
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
jerniceduncan/Team-Profile-Generator
|
e49c9be97ae6c5f61f82f7ede70009530353073d
|
1d755ebecae97926605c3180cb75e171ad085461
|
refs/heads/master
|
<repo_name>jupiter323/react-native-rsa-qrcode-CRM<file_sep>/src/modules/LoginModule/index.js
// Barrel file for the login module: re-exports the three login-flow screens.
import FirstScreen from './FirstScreen'
import LoginForm from './LoginForm'
import SignupForm from './SignUpForm'

export {
    FirstScreen,
    LoginForm,
    SignupForm
}
import { Dimensions } from 'react-native'

// Full-screen dimensions used to stretch the background and scale spacing.
const { height, width } = Dimensions.get('window');

const styles = {
    container: {
        flex: 1,
        justifyContent: 'center',
        alignItems: 'center',
    },
    // Background image filling the whole screen.
    background:{
        flex:1,
        width:width,
        height:height
    },
    logoimg: {
        resizeMode: 'contain',
        // Push the logo roughly a third of the way down the screen.
        marginTop: height / 3,
        marginBottom: height / 50,
    },
    // Orange rounded call-to-action button, half the screen width.
    btn:{
        width:width*1/2,
        backgroundColor:'rgba(255, 78, 0, 1)',
        borderRadius:25,
        marginVertical:height/15,
        paddingVertical:height/50
    },
    btnText: {
        fontSize:30,
        fontWeight: 'bold',
        color: '#ffffff',
        textAlign: 'center'
    },
}

export default styles;
import React, { Component } from 'react'
import { View, TextInput, Text, Image, TouchableHighlight, TouchableOpacity, ImageBackground, StatusBar } from 'react-native'
import { Icon, Container } from "native-base"
import { Button, Content } from "native-base"
import styles from './style'
// import logname from 'assets/images/logoname.png'
import BG from 'assets/images/bg.png'

// Login screen: email/password form plus navigation to the scanner and sign-up.
// NOTE(review): no credential validation happens here — login() navigates
// straight to 'PreScan'; presumably real authentication is still TODO.
export default class HomeScreen extends React.Component {
    // Proceed to the pre-scan screen.
    login() {
        this.props.navigation.navigate('PreScan')
    }
    // Open the sign-up form.
    signup() {
        this.props.navigation.navigate('Signup')
    }
    // Placeholder for the "Forgot Password" flow — currently a no-op.
    forgot() {
    }
    render() {
        return (
            <Container>
                <StatusBar hidden={true}></StatusBar>
                <Content>
                    <ImageBackground
                        style={styles.background}
                        source={BG} >
                        <View style={styles.container}>
                            {/* <Image style={styles.logoimg} source={logname}></Image> */}
                            <View style={styles.logoimg}></View>
                            <View style={styles.textcontainStyle}>
                                <Icon style={styles.iconstyle} type="EvilIcons" name={"envelope"} size={20} />
                                <TextInput style={styles.inputStyle}
                                    placeholder='E-mail'
                                    onChangeText={this.handleTextChange}
                                    secureTextEntry={this.props.secureTextEntry}
                                    placeholderTextColor='white'
                                    underlineColorAndroid='transparent'
                                />
                            </View>
                            <View style={styles.textcontainStyle}>
                                <Icon style={styles.iconstyle} type="EvilIcons" name={"lock"} size={20} />
                                {/* NOTE(review): secureTextEntry is set twice on this input
                                    (prop value and literal true); the last one wins. */}
                                <TextInput style={styles.inputStyle}
                                    placeholder='Password'
                                    onChangeText={this.handleTextChange}
                                    secureTextEntry={this.props.secureTextEntry}
                                    placeholderTextColor='white'
                                    underlineColorAndroid='transparent'
                                    secureTextEntry={true}
                                />
                            </View>
                            <Button
                                style={styles.gobuttonStyle}
                                onPress={() => { this.login() }}>
                                <Text style={styles.gobuttonTextStyle}>{"Login"}</Text>
                                <Icon style={styles.goiconStyle} type='Feather' name="chevrons-right" size={20} />
                            </Button>
                            <Button
                                style={styles.gobuttonStyle}
                                onPress={() => { this.forgot() }}>
                                <Text style={styles.gobuttonTextStyle}>{"Forgot Password"}</Text>
                                <Icon style={styles.goiconStyle} type="EvilIcons" name="lock" size={20} />
                            </Button>
                            <Text style={styles.altsignup}>
                                {"Don't have an account yet?"}
                            </Text>
                            <Button
                                style={styles.buttonStyle}
                                onPress={() => { this.signup() }}>
                                <Icon style={styles.iconStyle} type='Feather' name="user-plus" size={20} />
                                <Text style={styles.buttonTextStyle}>{"Sign Up Now"}</Text>
                            </Button>
                        </View>
                    </ImageBackground>
                </Content>
            </Container>
        );
    }
}
import { Dimensions } from 'react-native'

// Screen size used to stretch the background image.
const { height, width } = Dimensions.get('window');

const styles = {
    container: {
        flex: 1,
        width:'100%',
        justifyContent: 'center',
        alignItems: 'center',
    },
    background: {
        flex: 1,
        width: width,
        height: height
    },
    logoimg: {
        resizeMode: 'contain'
    },
    // NOTE(review): empty style object — presumably a placeholder for later styling.
    btn: {
    },
    btnText: {
        fontSize: 30,
        fontWeight: 'bold',
        color: '#ffffff',
        textAlign: 'center'
    },
}

export default styles;
var RSA = require('hybrid-crypto-js').RSA;
var Crypt = require('hybrid-crypto-js').Crypt;

var rsa = new RSA();
var crypt = new Crypt();

// Generate RSA key pair, defaults on 4096 bit key
// (the trailing `1024` argument below overrides this to a 1024-bit key).
rsa.generateKeypair(function(keypair) {
	// Callback function receives new keypair as a first argument
	// var publicKey = keypair.publicKey;
	// var privateKey = keypair.privateKey;
	// NOTE(review): this placeholder is not a valid PEM public key, so the
	// encrypt() call below will not produce a usable result — TODO confirm
	// a real key was meant to be pasted here.
	var publicKey = "-----<KEY>"
	// console.log(publicKey, privateKey);
	var message = JSON.stringify({productId:"p1", category:"c1"});
	// Encryption with one public RSA key
	var encrypted = crypt.encrypt(publicKey, message);
	console.log(encrypted); //string
},1024);
import React, { Component } from 'react'
import { View, Text, Image, TouchableOpacity, ImageBackground } from 'react-native'
import styles from './style'
import img from 'assets/images/icon.png'
import background_img from 'assets/images/bg.png'

// Landing screen: shows the app icon over a full-screen background with one
// button that takes the user to the login form.
export default class HomeScreen extends React.Component {
    // Navigate to the 'Login' route.
    login() {
        this.props.navigation.navigate('Login')
    }
    render() {
        return (
            <View style={styles.container}>
                <View style={{ flex: 1 }}>
                    <ImageBackground
                        style={styles.background}
                        source={background_img} >
                        <View style={styles.container}>
                            <Image
                                style={styles.logoimg}
                                source={img}
                            />
                            <TouchableOpacity style={styles.btn} onPress={() => { this.login() }}>
                                <Text style={styles.btnText}>Get Start</Text>
                            </TouchableOpacity>
                        </View>
                    </ImageBackground>
                </View>
            </View>
        );
    }
}
4f6bc530da468812161fc1a0edfd4a00def1dfd0
|
[
"JavaScript"
] | 6 |
JavaScript
|
jupiter323/react-native-rsa-qrcode-CRM
|
b254668c2902356b2210808d6291608efaa3c7c3
|
8b84a0756ff108d9d5ab1201c4d0e92a215aaf27
|
refs/heads/main
|
<repo_name>kabirvirji/lc-related-topics<file_sep>/foreground.js
// Content script: scrape the related-topic tags off the current LeetCode page
// and persist them to extension storage.
// (Re-injected on every popup open, so these top-level vars are redefined each time.)
var topic_elements = document.getElementsByClassName("tag__2PqS")
var topics = []

const set_topics = (topics, topic_elements) => {
    for (const element of topic_elements) {
        topics.push(element.innerText)
    }
    console.log(topics)
    chrome.storage.local.set({topics: topics}, function() {
        console.log(topics);
    })
}

set_topics(topics, topic_elements)
# lc-related-topics<file_sep>/popup.js
// Topics scraped from the page; stays null until chrome.storage returns.
// NOTE(review): clicking "submit" before the storage read completes will throw
// on `topics.length` below — TODO confirm whether this race matters in practice.
var topics = null
// Inject the content script, then load the topics it stored.
chrome.tabs.executeScript(null, {file: './foreground.js'}, () => {
    console.log('injected')
    chrome.storage.local.get(['topics'], function(result) {
        console.log('Value currently is ' + result.topics[1]);
        topics = result.topics
        console.log(topics)
    });
})

// Compare the user's checked topics against the actual topic list:
// alert(true) only when both sets match exactly (same size, same members).
document.getElementById('submit').onclick = function check() {
    var checked_topics = []
    var correct = 1
    var checkboxes = document.querySelectorAll('input[type=checkbox]:checked')
    for (var i = 0; i < checkboxes.length; i++) {
        checked_topics.push(checkboxes[i].value)
    }
    console.log(checked_topics, topics)
    if (checked_topics.length != topics.length) {
        alert(false)
    } else {
        // Same size, so it suffices to check every guess appears in the real list.
        for (var j = 0; j < checked_topics.length; j++) {
            if (!topics.includes(checked_topics[j])) {
                alert(false)
                correct = 0
                break
            }
        }
        if (correct == 1) {
            alert(true)
        }
    }
}
2e09bc2f42ec5be92117e0468b6e52cf5c46b3f2
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
kabirvirji/lc-related-topics
|
8411523a42a26aac291904bedc6a8fca8afbc643
|
7d15b0f477b266e3014032ee16848cea05afc0ff
|
refs/heads/master
|
<repo_name>stefanJi/GradlePluginDevelop<file_sep>/source/android_apk_optimization.md
###apk瘦身
- 1.使用一套图资源
对于绝大对数APP来说,只需要取一套设计图就足够了。鉴于现在分辨率的趋势,建议取720p的资源,放到xhdpi目录。
相对于多套资源,只使用720P的一套资源,在视觉上差别不大,很多大公司的产品也是如此,但却能显著的减少资源占用大小,顺便也能减轻设计师的出图工作量了
- 2.开启minifyEnabled混淆代码
- 3.开启shrinkResources去除无用资源
- 4.删除无用的语言资源(大多数情况仅仅需要中文)
android{
defaultConfig{
resConfigs "zh"
}
}
- 5.使用tinypng有损压缩
android打包本身会对png进行无损压缩,所以使用像tinypng这样的有损压缩是有必要的。 重点是Tinypng使用智能有损压缩技术,以尽量少的失真换来图片大小的锐减,效果非常好,强烈推荐。
Tinypng的官方网站:http://tinypng.com/ 拖进去就可以
- 6.对于非透明的大图使用jpg格式
jpg将会比png的大小有显著的优势,虽然不是绝对的,但是通常会减小到一半都不止。在启动页,活动页等之类的大图展示区采用jpg将是非常明智的选择。
- 7.使用webp格式(系统版本需要兼容)
webp支持透明度,压缩比比jpg更高但显示效果却不输于jpg,官方评测quality参数等于75均衡最佳。
相对于jpg、png,webp作为一种新的图片格式,限于android的支持情况暂时还没用在手机端广泛应用起来。从Android 4.0+开始原生支持,但是不支持包含透明度,直到Android 4.2.1+才支持显示含透明度的webp,使用的时候要特别注意。
官方介绍:https://developers.google.com/speed/webp/docs/precompiled
- 8.shape替换一些背景
- 9.selector 处理不同颜色图片使用着色方案
- 10.覆盖第三方库里的大图1x1
有些第三方库引用了一些大图,但实际上并不会并我们用到,第三方的图片通常会拷贝到我的项目中,因此我们可以使用同名1X1像素透明图片进行覆盖
- 11.第三方库避免重复引用
- 12.在线资源应用
- 13.插件化
- 14.删除armable-v7下so
- 15.使用7z极限压缩图片<file_sep>/source/plugin_develop.md
- 1. 创建一个Android Library类型的Moudle;例如mygradleplugin
- 2. 将此Module的java修改改成groovy
- 3. 包名自己可以修改
- 4. 此Moudle下build.gradle内容清空,添加如下代码
apply plugin: 'groovy'
apply plugin: 'maven'
dependencies {
compile gradleApi()
compile localGroovy()
}
repositories {
mavenCentral()
}
// group和version在后面使用自定义插件的时候会用到 可以随便起
group='com.micky'
version='1.0.0'
// 上传本地仓库的task,
uploadArchives {
repositories {
mavenDeployer {
//本地仓库的地址,自己随意选,但使用的时候要保持一致,这里就是当前项目目录
repository(url: uri('../repo'))
}
}
}
- 5. 在groovy路径下创建一个MyCustomPlugin.groovy,新建文件一定要带后缀名
class MyCustomPlugin implements Plugin<Project> {
void apply(Project project) {
System.out.println("这是自定义插件");
project.task('myTask') << {
println "Hi this is micky's plugin"
}
}
}
- 6. 现在,我们已经定义好了自己的gradle插件类,接下来就是告诉gradle,哪一个是我们自定义的插件类,因此,需要在main目录下新建resources目录,然后在resources目录里面再新建META-INF目录,再在META-INF里面新建gradle-plugins目录。最后在gradle-plugins目录里面新建properties文件,注意这个文件的命名,你可以随意取名,(这里起名com.micky.mycustom.properties)但是后面使用这个插件的时候,会用到这个名字。比如,你取名为com.hc.gradle.properties,而在其他build.gradle文件中使用自定义的插件时候则需写成:apply plugin: 'com.hc.gradle'
- 7. 然后在com.micky.mycustom.properties文件里面指明你自定义的类
implementation-class='com.micky.plugin.MyCustomPlugin
- 8. 执行 gradle uploadArchives 上传到本地仓库会生成jar
##### 然后在项目的app目录下的build.gradle 使用插件
//引入依赖
buildscript {
repositories {
maven {
url uri('../repo')
}
jcenter()
}
//这里和插件定义要一致,插件中name未指定就为默认项目名
dependencies {
classpath group: 'com.micky',
name: 'mygradleplugin',
version: '1.0.1'
}
}
//这个名字一定要对应上.properties文件名
apply plugin: 'com.micky.mycustom'
- 最后 先clean project(很重要!),然后再make project.从messages窗口打印如下信息
<file_sep>/source/applicationVariant.gradle
// 1. Rename the output APK: <productName>_v<versionName>_<buildDate>_release.apk
//    NOTE(review): relies on getProductName()/releaseTime() defined elsewhere — confirm.
android{
    applicationVariants.all { variant ->
        variant.outputs.all { output ->
            outputFileName = getProductName() + "_v${defaultConfig.versionName}_${releaseTime()}" + '_release.apk'
        }
    }
}
// 2. Override the applicationId for one specific flavor combination
//    (here the brancheTwo+distrib variant).
applicationVariants.all { variant ->
    def mergedFlavor = variant.mergedFlavor
    switch (variant.flavorName) {
        case "brancheTwoDistrib":
            mergedFlavor.setApplicationId("com.example.oldNameBranchTwo")
            break
    }
}
// 3. Variant filter: skip buildType/flavor combinations that make no sense.
//    release builds keep only the 'distrib' flavor; debug builds drop it.
variantFilter { variant ->
    if (variant.buildType.name.equals('release')) {
        variant.setIgnore(!variant.getFlavors().get(1).name.equals('distrib'));
    }
    if (variant.buildType.name.equals('debug')) {
        variant.setIgnore(variant.getFlavors().get(1).name.equals('distrib'));
    }
}
// 4. Change the APK output directory and filename.
//    NOTE(review): DeviceType and WPOS_APP_openAdb are project properties defined
//    elsewhere — this snippet will not evaluate standalone.
applicationVariants.all { variant ->
    variant.getPackageApplication().outputDirectory = new File(project.rootDir.absolutePath +'/apk/'+DeviceType)
    variant.outputs.all { output ->
        if(WPOS_APP_openAdb==1){
            outputFileName = 'WangposService_3-release_adbIsOpen'+'.apk'
        }else{
            outputFileName = 'WangposService_3-release'+'.apk'
        }
    }
}
// Interfaces to register in the generated ServiceLoader constructor.
// NOTE(review): all five entries are identical, so the generated file contains
// duplicate map keys (later puts overwrite earlier ones) — confirm intended.
ext {
    baseDevConfigs = [
            'InstallManager'
            ,
            'InstallManager'
            ,
            'InstallManager'
            ,
            'InstallManager'
            ,
            'InstallManager'
    ]
}
/**
 * 生成新的ServiceLoader.java文件内容
 * Rewrites src/main/java/config/ServiceLoader.java in place:
 *  - strips every existing `configs.put` line,
 *  - ensures the wildcard import for the interface package is present,
 *  - re-inserts one `configs.put(X.class, XImpl.class)` per baseDevConfigs entry
 *    inside the constructor.
 * NOTE(review): each entry is inserted at startIndex+1, so the generated lines
 * end up in reverse order of baseDevConfigs (invisible here because all entries
 * are identical).
 * @return
 */
def initServiceLoaderFile() {
    def configFile = "${projectDir}/src/main/java/config/ServiceLoader.java"
    File file = new File(configFile);
    def oldlines = file.readLines();
    def newlines = []
    int startIndex = -1;     // index of the constructor's opening line
    int i = 0;
    def expConfig = "configs.put(";
    def packageIndex = 0;    // index of the `package` declaration
    for (line in oldlines) {
        // Keep everything except previously generated configs.put lines.
        if (!line.contains("configs.put")) {
            newlines.add(line)
        }
        if (line.contains("package")) {
            packageIndex = i;
        }
        if (line.contains("public ServiceLoader(){")){
            startIndex = i;
        }
        i++;
    }
    // Ensure the interface package is imported exactly once (exact-string check).
    if (!newlines.contains("import com.wangpos.test.inter.*;")) {
        newlines.add(packageIndex + 1, "import com.wangpos.test.inter.*;");
    }
    // 拼接默认配置内容 — splice the registration lines into the constructor.
    for (line in baseDevConfigs) {
        def instance = "        configs.put(${line}.class,${line}Impl.class);";
        newlines.add(startIndex+1, instance);
    }
    PrintWriter writer = file.newPrintWriter("UTF-8")
    newlines.each { line -> writer.println(line) }
    writer.close()
}
initServiceLoaderFile()
###为什么Android系统要求16ms完成一帧绘制
1.人类的眼睛可以识别的最大帧率60hz:
看一个人百米赛跑的时候,运动员是一步一步的跑过去,我们之所以看见运动员在跑,是因为不间断的在向我们眼睛里传送图片信息,串联起来就形成了一幅动态图,运动员跑的过程可以细分成无数的图片信息,但是由于人类的极限,我们不能看清他的所有动作,我们所能看到的极限是什么?就是1秒内可以看清60个图片,所以我们1秒内运动员只传来接近60个差别图片的话,我们可以看清的,当然由于运动员速度很快,100秒也就是10秒,100米运动员动作很多,在这100米内我们只能看清600个动作,所以细微小的抬腿动作肯定是无法区分的,所以我们看清一个图片的时间最快是1/60. = 0.016 m = 16ms
2.Android系统清除人类眼睛识别的极限,要求每一帧都要在 16ms 内绘制完成,也就是1秒绘制62.5次;
也就是1秒62.5帧,通常人眼睛可以识别的最大就是1秒60帧,再大识别不出来
<file_sep>/source/createJavaCode.gradle
/**
 * Rewrites BuildConfigDefault.java in place, replacing the string value of the
 * member whose line contains `key` with `value`.
 * NOTE(review): depends on replaceStringMemberValue(...) which is not defined
 * in this file (see initConfig.gradle) — this script must be applied together
 * with the file that provides it.
 */
def initMyBuildConfig(key,value) {
    def configFile = "${projectDir}/src/cn/weipass/util/config/build/BuildConfigDefault.java"
    def encoding = "UTF-8"
    def lines = []
    // Read every line, substituting the tagged member's value.
    new File(configFile).newReader(encoding).eachLine { line ->
        lines.add( replaceStringMemberValue(line,key,value) )
    }
    // Write the modified content back over the original file.
    BufferedWriter writer = new File(configFile).newWriter(encoding)
    lines.each{ line -> writer.writeLine(line) }
    writer.close()
}
#### 终端打包传入参数
在很多打包场景为了避免修改代码,通过命令行方式传递参数是很方便
- 参数名字前加P,每个参数用空格隔开
./gradlew assembleRelease -PTestName='hello' -Pcustom=2
- 在代码中如何使用呢,看下面:
def custom = project.hasProperty('custom')?custom:'defaultCustom'
def testName = project.hasProperty('TestName')?TestName:'defaultName'
//println(custom)
//println(testName)
<file_sep>/source/day03.gradle
//Android 项目通过 settings.gradle 完成模块配置
//在Gradle众多工程是通过工程树表示的,相当于我们在Android Studio看到的Project和Module概念一样。
// 根工程相当于Android Studio的Project,一个根工程可以有很多子工程,也就是很多Module
//include ':app', ':mygradleplugin', ':mylibrary'
<file_sep>/source/packageOption.gradle
//
// When the app pulls in two or more library modules / jars that each bundle the
// same META-INF style resource, packaging fails with a DuplicateFileException like:
// Error:Execution failed for task ':app:transformResourcesWithMergeJavaResForDebug'.
// > com.android.build.api.transform.TransformException: com.android.builder.packaging.DuplicateFileException: Duplicate files copied in APK org/apache/log4j/xml/log4j.dtd
// File1: .../facelibrary/unspecified/jars/classes.jar
// File2: .../rylibrary/unspecified/jars/classes.jar
// Fix: exclude the duplicated resource from the merged APK.
android{
    packagingOptions{
        exclude 'org/apache/log4j/xml/log4j.dtd'
    }
}
<file_sep>/source/exeRank.md
###Gradle 脚本执行顺序
- gradle的解析顺序:rootproject 的setting.gradle,然后是rootproject的build.gradle,然后是各个subproject。所以project下的build.gradle会先于app下的build.gradle。
- 在build.gradle中,我们可以通过apply plugin: 引入插件,也可以通过 apply from .gradle引入其他gradle脚本中的函数定义或task等
#### 一般hook,我们指的是gradle的生命周期:
- 在解析setting.gradle之后,开始解析build.gradle之前,这里如果要干些事情(更改build.gradle校本内容),可以写在beforeEvaluate
举个例子,我们将我们的一个subproject中的apply plugin改掉,原来是一个library工程,我们希望它被当作application处理:
project.beforeEvaluate {
// Change android plugin from `lib' to `application' dynamically
// FIXME: Any better way without edit file?
if (mBakBuildFile.exists()) {
// With `tidyUp', should not reach here
throw new Exception("Conflict buildFile, please delete file $mBakBuildFile or " +
"${project.buildFile}")
}
def text = project.buildFile.text.replaceAll(
'com\\.android\\.library', 'com.android.application')
project.buildFile.renameTo(mBakBuildFile)
project.buildFile.write(text)
}
- 在所有build.gradle解析完成后,开始执行task之前,此时所有的脚本已经解析完成,task,plugins等所有信息可以获取,task的依赖关系也已经生成,如果此时需要做一些事情,可以写在afterEvaluate
project.afterEvaluate {
// Set application id
def manifest = new XmlParser().parse(project.android.sourceSets.main.manifestFile)
project.android.defaultConfig.applicationId = manifest.@package
}
**每个task都可以定义doFirst,doLast,用于定义在此task执行之前或之后执行的代码**
project.assemble.doLast {
println "assemble finish"
}
project.assemble.doFirst {
println "assemble start"
}
<file_sep>/source/day04.gradle
// Gradle 插件分类
//使用了 com.android.application 插件 ,该插件用于编译 Android 应用。
apply plugin: 'com.android.application'
//com.android.library,该插件用于编译Android lib
apply plugin:'com.android.library'
//com.android.test,该插件用于测试Android项目
apply plugin:'com.android.test'<file_sep>/source/initConfig.gradle
// Replace the first match of regex `src` with `dest` in `str`, but only when
// the line mentions `tag`; untagged lines pass through unchanged.
def replaceFirstIfTaged(str, tag, src, dest) {
    if (!str.contains(tag)) {
        return str
    }
    println "** ${tag} = ${dest};"
    return str.replaceFirst(src, "${dest}")
}
/**
 * Rewrites BuildConfigDefault.java in place: on every line containing `key`,
 * the first match of regex `src` is replaced with `value`.
 */
def replaceInt(key, src, value) {
    def configFile = "${projectDir}/src/main/java/config/BuildConfigDefault.java"
    def encoding = "UTF-8"
    def lines = []
    new File(configFile).newReader(encoding).eachLine { line ->
        lines.add(replaceFirstIfTaged(line, key, src, value))
    }
    // Write the transformed lines back over the original file.
    BufferedWriter writer = new File(configFile).newWriter(encoding)
    lines.each { line -> writer.writeLine(line) }
    writer.close()
}
/**
 * 替换整数
 * Convenience wrapper: replace the integer literal on the line tagged `key`
 * in BuildConfigDefault.java with `value`.
 * @param key   substring identifying the target line (e.g. a field name)
 * @param value new integer value
 * @return
 */
def initConfigInt(key, value) {
    replaceInt(key, /\d+/, value)
}
// Replace the value of a string member declaration: for a line mentioning
// `tag`, keep everything up to (and including) the opening quote, splice in
// `dest`, and close the statement with `";`. Untagged lines are returned as-is.
def replaceStringMemberValue(str, tag, dest) {
    if (!str.contains(tag)) {
        return str
    }
    println "** ${tag} = ${dest};"
    def prefix = str.substring(0, str.indexOf("\"") + 1)
    return prefix + dest + "\";"
}
/**
 * Rewrites BuildConfigDefault.java in place, setting the string member on the
 * line tagged `key` to `value` (via replaceStringMemberValue).
 */
def initConfigString(key, value) {
    println("'projectDir=" + projectDir)///Users/qiyue/GitProject/GradlePluginWS/GradlePlugin/app
    def configFile = "${projectDir}/src/main/java/config/BuildConfigDefault.java"
    def encoding = "UTF-8"
    def lines = []
    new File(configFile).newReader(encoding).eachLine { line ->
        lines.add(replaceStringMemberValue(line, key, value))
    }
    // Write the transformed lines back over the original file.
    BufferedWriter writer = new File(configFile).newWriter(encoding)
    lines.each { line -> writer.writeLine(line) }
    writer.close()
}
initConfigString("name", "configName")
initConfigInt("isOpen", 4)
<file_sep>/source/assemble.md
### gradle命令
一般是./gradlew +参数, gradlew代表 gradle wrapper,意思是gradle的一层包装,大家可以理解为在这个项目本地就封装了gradle,即gradle wrapper, 在gradle/wrapper/gralde-wrapper.properties文件中声明了它指向的目录和版本。只要下载成功即可用gradlew wrapper的命令代替全局的gradle命令。
./gradlew -v 版本号
./gradlew clean 清除app目录下的build文件夹
./gradlew build 检查依赖并编译打包
./gradlew tasks 列出所有task
这里注意的是 ./gradlew build 命令把debug、release环境的包都打出来,如果正式发布只需要打Release的包,该怎么办呢,下面介绍一个很有用的命令 assemble, 如:
### 过滤构建类型打包
./gradlew assembleDebug 编译并打Debug包
./gradlew assembleRelease 编译并打Release的包
### 过滤项目打包
除此之外,assemble还可以和productFlavors结合使用:
./gradlew installRelease Release模式打包并安装
./gradlew uninstallRelease 卸载Release模式包
assemble结合Build Variants来创建task
assemble 还能和 Product Flavor 结合创建新的任务,其实 assemble 是和 Build Variants 一起结合使用的,而 Build Variants = Build Type + Product Flavor,举个例子大家就明白了:
如果我们想打包 wandoujia 渠道的release版本,执行如下命令就好了:
./gradlew assembleWandoujiaRelease
如果我们只打wandoujia渠道版本,则:
./gradlew assembleWandoujia
此命令会生成wandoujia渠道的Release和Debug版本
同理我想打全部Release版本:
./gradlew assembleRelease
这条命令会把Product Flavor下的所有渠道的Release版本都打出来。
总之,assemble 命令创建task有如下用法:
assemble<Variant Name>: 允许直接构建一个Variant版本,例如assembleFlavor1Debug。
assemble<Build Type Name>: 允许构建指定Build Type的所有APK,例如assembleDebug将会构建Flavor1Debug和Flavor2Debug两个Variant版本。
assemble<Product Flavor Name>: 允许构建指定flavor的所有APK,例如assembleFlavor1将会构建Flavor1Debug和Flavor1Release两个Variant版本。<file_sep>/source/sdkVersionType.md
- compileSdkVersion 26
告诉 Gradle 用哪个 Android SDK 版本编译你的应用,当你修改了 compileSdkVersion 的时候,
可能会出现新的编译警告、编译错误,但新的 compileSdkVersion 不会被包含到 APK 中:它纯粹只是在编译的时候使用。
compileSdkVersion 通常我们使用最新的,在编译的时候检查代码的错误和警告,提示开发者修改和优化
- buildToolsVersion "26.0.1"
表示构建工具的版本号,这个属性值对应 AndroidSDK 中的 Android SDK Build-tools,
正常情况下 build.gradle 中的 buildToolsVersion 跟你电脑中 Android SDK Build-tools 的最新版本是一致的
- minSdkVersion 15
应用可以运行的最低要求,app运行所需的最低sdk版本.低于minSdkVersion的手机将无法安装.
- targetSdkVersion 26
minSdkVersion和targetSdkVersion相信非常多人都不太理解。我在网上也看了很多关于这两者差别的文章,感觉说的都非常模糊。直到我在stackOverFlow看到Android Min SDK Version vs. Target SDK Version这篇文章后,我才最终弄清楚怎样去设置minSdkVersion和targetSdkVersion。如今我将分享给大家。
简言之,这篇文章的目的是为了区分minSDK和targetSDK,这两者相当于一个区间。你能够用到targetSDK中最新的API和最酷的新功能,但你又不得不向下兼容到minSDK,保证这个区间内的设备都能够正常的执行你的app。换句话说,你想使用Android刚刚推出的新特性。但这对于你的app又不是必须的。你就能够将targetSDK设置为你想使用新特性的SDK版本号,minSDK设置成低版本号保证全部人都能够使用你的app。
<file_sep>/app/src/main/java/com/wangpos/test/MainActivity.java
package com.wangpos.test;
import android.arch.lifecycle.Observer;
import android.os.Build;
import android.support.annotation.Nullable;
import android.support.annotation.RequiresApi;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.wangpos.test.base.BaseActivity;
import com.wangpos.test.work.UploadWork;
import java.util.concurrent.TimeUnit;
import androidx.work.Constraints;
import androidx.work.Data;
import androidx.work.NetworkType;
import androidx.work.OneTimeWorkRequest;
import androidx.work.PeriodicWorkRequest;
import androidx.work.State;
import androidx.work.WorkManager;
import androidx.work.WorkStatus;
/**
 * Demo activity for WorkManager: two buttons trigger either a one-off
 * network-constrained work request or a periodic (10 minute) one, and the
 * activity observes each request's {@link WorkStatus} to surface the result.
 */
public class MainActivity extends BaseActivity implements View.OnClickListener {
    private Presenter presenter;
    private Button btnRequestNetWrok;
    private Button btnPeriod;
    @RequiresApi(api = Build.VERSION_CODES.M)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Log.i("info","onCreate start");
        Log.i("info","onCreate end");
        // /Users/qiyue/GitProject/GradlePlugin/app/build/intermediates/classes/debug
        presenter = new Presenter();
        // BuildConfig.isDebug is injected by the Gradle build scripts.
        if (BuildConfig.isDebug){
            toast(BuildConfig.FLAVOR+"debug版本");
        }
        // WorkManager.getInstance().cancelWorkById(oneTimeWorkRequest.getId());
        btnRequestNetWrok = (Button) findViewById(R.id.btn_request_network);
        btnRequestNetWrok.setOnClickListener(this);
        btnPeriod = (Button) findViewById(R.id.btn_Period_work);
        btnPeriod.setOnClickListener(this);
    }
    @Override
    protected void onResume() {
        super.onResume();
        presenter.onResume();
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()){
            case R.id.btn_request_network:
                exeWorkByNetWork();
                break;
            case R.id.btn_Period_work:
                exePeriodWork();
                break;
        }
    }
    /**
     * Enqueues a periodic UploadWork request (every 10 minutes, network required)
     * and observes its status to toast the worker's "result" output.
     */
    private void exePeriodWork() {
        // Constraint: only run while the network is connected.
        Constraints constraints = new Constraints.Builder().setRequiredNetworkType(NetworkType.CONNECTED)
                .build();
        // Input data handed to the worker.
        Data inputData = new Data.Builder().putBoolean("isTest", true).build();
        // Periodic request: repeat every 10 minutes.
        PeriodicWorkRequest periodicWorkRequest = new PeriodicWorkRequest.Builder(UploadWork
                .class, 10, TimeUnit.MINUTES).addTag(UploadWork.TAG)
                .setConstraints(constraints).setInputData(inputData).build();
        WorkManager.getInstance().cancelWorkById(periodicWorkRequest.getId());
        // Observe status changes to receive the worker's result.
        WorkManager.getInstance().getStatusById(periodicWorkRequest.getId())
                .observe(this, new Observer<WorkStatus>() {
                    @Override
                    public void onChanged(@Nullable WorkStatus workStatus) {
                        Log.i("result","state="+workStatus.getState());
                        if (workStatus != null && workStatus.getState() == State.SUCCEEDED) {
                            // Extract the worker's output data.
                            String result = workStatus.getOutputData().getString("result", "");
                            Log.i("result","workStatus="+workStatus.getState());
                            toast(result);
                        }
                    }
                });
        // Enqueue the work.
        WorkManager.getInstance().enqueue(periodicWorkRequest);
        /**
         * You can also chain work so it runs sequentially:
         WorkManager.getInstance(this)
         .beginWith(Work.from(LocationWork.class))
         .then(Work.from(LocationUploadWorker.class))
         .enqueue();
         Or run several pieces of work in parallel:
         WorkManager.getInstance(this).enqueue(Work.from(LocationWork.class,
         LocationUploadWorker.class));
         */
    }
    /**
     * Enqueues a single network-constrained UploadWork request and observes its
     * status to toast the worker's "result" output.
     */
    private void exeWorkByNetWork() {
        // Constraint: only run while the network is connected.
        Constraints constraints = new Constraints.Builder().setRequiredNetworkType(NetworkType.CONNECTED)
                .build();
        // Input data handed to the worker.
        Data inputData = new Data.Builder().putBoolean("isTest", true).build();
        // One-off request.
        OneTimeWorkRequest oneTimeWorkRequest = new OneTimeWorkRequest.Builder(
                UploadWork.class
        ).setConstraints(constraints).setInputData(inputData).build();
        // Observe status changes to receive the worker's result.
        WorkManager.getInstance().getStatusById(oneTimeWorkRequest.getId())
                .observe(this, new Observer<WorkStatus>() {
                    @Override
                    public void onChanged(@Nullable WorkStatus workStatus) {
                        if (workStatus != null && workStatus.getState() == State.SUCCEEDED) {
                            // Extract the worker's output data.
                            String result = workStatus.getOutputData().getString("result", "");
                            Log.i("result","workStatus="+workStatus.getState());
                            toast(result);
                        }
                    }
                });
        // Enqueue the work.
        WorkManager.getInstance().enqueue(oneTimeWorkRequest);
        /**
         * You can also chain work so it runs sequentially:
         WorkManager.getInstance(this)
         .beginWith(Work.from(LocationWork.class))
         .then(Work.from(LocationUploadWorker.class))
         .enqueue();
         Or run several pieces of work in parallel:
         WorkManager.getInstance(this).enqueue(Work.from(LocationWork.class,
         LocationUploadWorker.class));
         */
    }
    // Unused placeholder for cancelling a request by id.
    private void cancel(){
        // WorkManager.getInstance().cancelWorkById(oneTimeWorkRequest.getId());
    }
}
<file_sep>/source/makeJar.md
###makeJar 打包Jar 可以通过from指定要打包的目录文件就是.class文件
####注意
- 一定要依赖于build 否则打出来的jar包就是空的
- 在过滤打包字节码文件时,被删除的文件可以被其他类引用,但不要被调用或初始化,否者就会报错
task makeSdkJar(type:org.gradle.api.tasks.bundling.Jars, dependsOn: 'build'){
baseName 'pluginsdk'
//只打包org.cmdmac下的org.cmdmac.pluginsdk.impl和org.cmdmac.gamecenter,其他子包不会被打包进去
from('build/intermediates/classes/debug/org/cmdmac/'){
include'pluginsdk/impl'
include'gamecenter'
}
into('org/cmdmac/')
// exclude('R.class') 可以过滤不需要打包的文件
// exclude{ it.name.startsWith('R$');}
}
task makeSdkJar(type:org.gradle.api.tasks.bundling.Jar){
baseName 'pluginsdk'
//只打包org.cmdmac下的org.cmdmac.pluginsdk.impl和org.cmdmac.gamecenter,其他子包不会被打包进去
from('build/intermediates/classes/debug/org/cmdmac/'){
include'pluginsdk/impl'
include'gamecenter'
}
into('org/cmdmac/')
// exclude('R.class')
// exclude{ it.name.startsWith('R$');}
}
<file_sep>/app/src/main/java/config/ServiceLoader.java
package config;
import com.wangpos.test.inter.*;
import java.util.HashMap;
import java.util.Map;
/**
* Created by qiyue on 2018/5/30.
*/
/**
 * Minimal service-locator style registry that maps an interface class to the
 * class implementing it.
 */
public class ServiceLoader {
    // Registry of interface class -> implementation class.
    Map<Object,Object> configs = new HashMap<>();

    public ServiceLoader(){
        // Fix: the original constructor repeated this identical put five
        // times; a HashMap overwrites entries with the same key, so the four
        // extra calls were redundant no-ops and have been removed.
        configs.put(InstallManager.class,InstallManagerImpl.class);
    }
}
<file_sep>/source/ui_optimization.md
### Android界面优化
- [为什么Android系统要求16ms完成一帧绘制?](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/why_android_16ms.md)
- [16ms对于系统干了那些事?](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/16ms_do.md)
- [具体优化工具和优化方法](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/optim_list.md)
<file_sep>/source/day02.md
### Gradle版本
Gradle 目录下 - wrapper - gradle-wrapper.properties
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
通常我们 只需要修改distributionUrl就可以了,比如导入其他人的项目通常卡住,其实是因为本地没有对应的gradle版本,正在联网下载,因此我们只需要在导入前改这个文件,查找本地已经有的版本就可以了
本地版本一般在用户目录.gradle
Wrapper是对Gradle的一层包装,便于在团队开发过程中统一Gradle构建的版本号,这样大家都可以使用统一的Gradle版本进行构建
### Android 项目构建还需要引入Gradle插件,下面是配置插件版本(默认创新新项目插件版本就是当前AndroidStudio版本)
```gradle
buildscript {
repositories {
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:1.5.0'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
```
### Plugin for Gradle版本 和 Gradle版本 之间的版本对应关系
Gradle Plugin version | Required Gradle version
---|---
1.0.0 - 1.1.3 | 2.2.1 - 2.3
1.2.0 - 1.3.1 | 2.2.1 - 2.9
1.5.0 2.2.1 | 2.13
2.0.0 - 2.1.2 | 2.10 - 2.13
2.1.3 - 2.2.3 | 2.14.1+
2.3.0+ | 3.3+
<file_sep>/source/custom_view_optimization.md
### 针对自定义View优化:
1. 针对自定义View,我们可能犯下面三个错误:
Useless calls to onDraw():我们知道调用View.invalidate()会触发View的重绘,有两个原则需要遵守,第1个是仅仅在View的内容发生改变的时候才去触发invalidate方法,第2个是尽量使用ClipRect等方法来提高绘制的性能。
Useless pixels:减少绘制时不必要的绘制元素,对于那些不可见的元素,我们需要尽量避免重绘。
Wasted CPU cycles:对于不在屏幕上的元素,可以使用Canvas.quickReject把他们给剔除,避免浪费CPU资源。另外尽量使用GPU来进行UI的渲染,这样能够极大的提高程序的整体表现性能。
2. 执行延迟任务,通常有下面三种方式:
AlarmManager:使用AlarmManager设置定时任务,可以选择精确的间隔时间,也可以选择非精确时间作为参数。除非程序有很强烈的需要使用精确的定时唤醒,否者一定要避免使用他,我们应该尽量使用非精确的方式。
SyncAdapter:我们可以使用SyncAdapter为应用添加设置账户,这样在手机设置的账户列表里面可以找到我们的应用。这种方式功能更多,但是实现起来比较复杂(Google官方培训课程)
JobSchedulor:这是最简单高效的方法,我们可以设置任务延迟的间隔,执行条件,还可以增加重试机制。
8.0WorkManager:
3. Android的Heap空间是不会自动做兼容压缩的,意思就是如果Heap空间中的图片被收回之后,这块区域并不会和其他已经回收过的区域做重新排序合并处理,那么当一个更大的图片需要放到heap之前,很可能找不到那么大的连续空闲区域,那么就会触发GC,使得heap腾出一块足以放下这张图片的空闲区域,如果无法腾出,就会发生OOM。
尽量减少PNG图片的大小是Android里面很重要的一条规范。相比起JPEG,PNG能够提供更加清晰无损的图片,但是PNG格式的图片会更大,占用更多的磁盘空间。到底是使用PNG还是JPEG,需要设计师仔细衡量,对于那些使用JPEG就可以达到视觉效果的,可以考虑采用JPEG即可。
Webp,它是由Google推出的一种既保留png格式的优点,又能够减少图片大小的一种新型图片格式。
4. 使用inBitmap属性可以告知Bitmap解码器去尝试使用已经存在的内存区域,新解码的bitmap会尝试去使用之前那张bitmap在heap中所占据的pixel data内存区域,而不是去问内存重新申请一块区域来存放bitmap。利用这种特性,即使是上千张的图片,也只会仅仅只需要占用屏幕所能够显示的图片数量的内存大小。
在SDK 11 -> 18之间,重用的bitmap大小必须是一致的,例如给inBitmap赋值的图片大小为100-100,那么新申请的bitmap必须也为100-100才能够被重用。从SDK 19开始,新申请的bitmap大小必须小于或者等于已经赋值过的bitmap大小。
新申请的bitmap与旧的bitmap必须有相同的解码格式,例如大家都是8888的,如果前面的bitmap是8888,那么就不能支持4444与565格式的bitmap了
- 优化建议
1、RelativeLayout会让子View调用2次onMeasure,在不影响层级深度的情况下,使用LinearLayout和FrameLayout而不是RelativeLayout。
2、如果在View树层级的末端,应尽量用一个RelativeLayout来代替两层LinearLayout或FrameLayout。降低View树的层级才是王道。
3、LinearLayout 在有weight时,可能会调用子View2次onMeasure,降低测量的速度,在使用LinearLayout 应尽量避免使用layout_weight。
LinearLayout 在有weight属性时,为什么是可能会导致 2次measure ?
分析源码发现,并不是所有的layout_weight都会导致两次measure:
Vertical模式下,child设置了weight(height=0,weight > 0)时将会跳过这一次Measure,之后会再一次Measure
//Vertical模式下,child设置(height=0,weight > 0)时将会跳过这一次Measure,之后会再一次Measure
if (heightMode == MeasureSpec.EXACTLY && lp.height == 0 && lp.weight > 0) {
// Optimization: don't bother measuring children who are going to use
// leftover space. These views will get measured again down below if
// there is any leftover space.
final int totalLength = mTotalLength;
mTotalLength = Math.max(totalLength, totalLength + lp.topMargin + lp.bottomMargin);
skippedMeasure = true;//跳过这一次measure
} <file_sep>/source/english.md
## Gradle plugin development introduction
- [中文文档](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/chinese.md)
#### Gradle basic:
This time must be systematically mastered. Are you ready?
- [Acquaintance Gradle and Domain Specific Languages](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day01.gradle)
- [Gradle version configuration](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day02.md)
- [Gradle module configuration](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day03.gradle)
- [Gradle plugin classification](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day04.gradle)
- [Gradle Android plugin content](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android.gradle)
- [Differences between compileSdkVersion, minSdkVersion, targetSdkVersion and buildToolsVersion](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/sdkVersionType.md)
- [Gradle integrate configuration version](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/version.gradle)
- [Gradle branch channel packaging](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/productflavor.gradle)
- [Gradle configuration AndroidManifest](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/configManifest.gradle)
- [Gradle Configure your source path, dynamically remove classes that do not require packaging](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/sourceSet.gradle)
- [Gradle Project Dependency Configuration](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/project_library.md)
- [Gradle lintOption](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/lintOption.gradle)
- [lint report](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/lint-results-obmDebug.html)
- [Gradle package optimization configuration](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/optimization.gradle)
- [Gradle gradle.properties Configure gradle version and buildTools version, and some constant version number](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/properties.gradle)
- [Gradle use variantFilter modify the generated apk path, name](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/applicationVariant.gradle)
- [Gradle configure java version](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/set_java_version.gradle)
- [Gradle packagingOptions Solve duplicate packages and files](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/packageOption.gradle)
- [AndroidStudio common problem](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android_studio.xml)
- [Gradle command package apk](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/assemble.md)
- [Gradle command line passing parameters](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/assembleWithParams.md)
- [Gradle compiler dynamically generates Java code](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/operate_file.md)
- [Gradle create Task](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/task.md)
- [Gradle select different AndroidManifest.xml](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/diffManifest.md)
- [Gradle Order of execution](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/exeRank.md)
- Gradle Generate test report
- [Gradle Generate interface document](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/genJavadoc.gradle)
- [AAR generate](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/aar.md)
- [jar generate](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/makeJar.md)
- [Metaprogramming](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/metaprogramming.md)
- See all tasks commands *./gradlew tasks --all*
#### Gradle Advanced plug-in development
- [Plugin development steps](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/plugin_develop.md)
- [Gradle Transform Listening files compile end](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/gradle_tranform.md)
#### Android Performance optimization
- [apk Slimming optimization](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android_apk_optimization.md)
- [Interface performance UI](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/ui_optimization.md)
- [Memory leak](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/memory_optimization.md)
- [WorkManager](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/workmanager.md)
### Conclusion of issue
- 1. Cannot find dependent libraries, need to add jcenter() in repositories
- 2. javassist can not find the jar package, it is necessary to javassist import jar package
- 3. Found that the generated apk did not change, delete the build directory to rebuild, still no change, click Android Studio setting to clear the cache, restart
- 4. The project app changes the name of the error message is not found when the project, the general root directory.idea can be resolved
- 5. Resolve Error: All flavors must now belong to a named flavor dimension.
flavorDimensions "versionCode"
- 6.Android Studio clean cause Error:Execution failed for task ':app:mockableAndroidJar' > java.lang.NullPointer
Solution 1. This problem is caused by changing the version of the main project complieSdk. It is only necessary to change the version of all subprojects to be the same;
Solution 2. You can also pass
- 1. Go to File -> Settings -> Build, Execution, Deployment -> Compiler
- 2. Add to "Command-line Options": -x :app:mockableAndroidJar
- 3. Press "OK" and try to Rebuild Project again.
Solution 3. File -> Settings -> Build, Execution, Deployment -> Build Tools -> Gradle -> Experimental
Cancel Enable All test.. Checked, but mac version did not find this option
Solution 4. Add in the root directory
gradle.taskGraph.whenReady {
tasks.each { task ->
if (task.name.equals('mockableAndroidJar')) {
task.enabled = false
}
}
}
- 7. When we modify the compile 'com.android.support:appcompat-v7:25.0.0' version, it will report a lot of value
The topic could not be found error
At this point we only need to modify the compile SDK version and this version of V7 to be the same
#### friendship link
[fly803/BaseProject](https://github.com/fly803/BaseProject)
<file_sep>/source/optim_list.md
#### 优化开始,Are you ready?
##### 优化分析工具
- Hierarchy View 在Android SDK里自带,常用来查看界面的视图结构是否过于复杂,用于了解哪些视图过度绘制,又该如何进行改进。
- Lint 是 ADT 自带的静态代码扫描工具,可以给 XML 布局文件和 项目代码中不合理的或存在风险的模块提出改善性建议。官方关于 Lint 的实际使用的提示,列举几点如下:
包含无用的分支,建议去除;
包含无用的父控件,建议去除;
警告该布局深度过深;
建议使用 compound drawables ;
建议使用 merge 标签;
- Systrace 在Android DDMS 里自带,可以用来跟踪 graphics 、view 和 window 的信息,发现一些深层次的问题。(用起来比较复杂)
- Track 在 Android DDMS里自带,是个很棒的用来跟踪构造视图的时候哪些方法费时,精确到每一个函数,无论是应用函数还是系统函数,我们可以很容易地看到掉帧的地方以及那一帧所有函数的调用情况,找出问题点进行优化。
- OverDraw
通过在 Android 设备的设置 APP 的开发者选项里打开 “ 调试 GPU 过度绘制 ” ,来查看应用所有界面及分支界面下的过度绘制情况,方便进行优化。
- GPU 呈现模式分析
通过在 Android 设备的设置 APP 的开发者选项里启动 “ GPU 呈现模式分析 ” ,可以得到最近 128 帧 每一帧渲染的时间,分析性能渲染的性能及性能瓶颈。
- StrictMode
通过在 Android 设备的设置 APP 的开发者选项里启动 “ 严格模式 ” ,来查看应用哪些操作在主线程上执行时间过长。当一些操作违背了严格模式时屏幕的四周边界会闪烁红色,同时输出 StrictMode 的相关信息到 LOGCAT 日志中。
- Animator duration scale
通过在 Android 设备的设置 APP 的开发者选项里打开 “ 窗口动画缩放 ” / “ 过渡动画缩放 ” / “ 动画程序时长缩放 ”,来加速或减慢动画的时间,以查看加速或减慢状态下的动画是否会有问题。
- Show hardware layer updates
通过在 Android 设备的设置 APP 的开发者选项里启动 “ 显示硬件层更新 ”,当 Flash 硬件层在进行更新时会显示为绿色。使用这个工具可以让你查看在动画期间哪些不期望更新的布局有更新,方便你进行优化,以获得应用更好的性能。实例《 Optimizing Android Hardware Layers 》(需要翻墙):「 戳我 」。
##### 优化方法
1 优化布局的结构
- 布局结构太复杂,会减慢渲染的速度,造成性能瓶颈。我们可以通过以下这些惯用、有效的布局原则来优化:
- 避免复杂的View层级。布局越复杂就越臃肿,就越容易出现性能问题,寻找最节省资源的方式去展示嵌套的内容;
- 尽量避免在视图层级的顶层使用相对布局 RelativeLayout 。相对布局 RelativeLayout 比较耗资源,因为一个相对布局 RelativeLayout 需要两次度量来确保自己处理了所有的布局关系,而且这个问题会伴随着视图层级中的相对布局 RelativeLayout 的增多,而变得更严重;
- 布局层级一样的情况建议使用线性布局 LinearLayout 代替相对布局 RelativeLayout*,因为线性布局 LinearLayout 性能要更高一些;确实需要对分支进行相对布局 RelativeLayout 的时候,可以考虑更优化的网格布局 GridLayout ,它已经预处理了分支视图的关系,可以避免两次度量的问题;
- 相对复杂的布局建议采用相对布局 RelativeLayout ,相对布局 RelativeLayout 可以简单实现线性布局 LinearLayout 嵌套才能实现的布局;
- 不要使用绝对布局 AbsoluteLayout ;
- 将可重复使用的组件抽取出来并用 </include> 标签进行重用。如果应用多个地方的 UI 用到某个布局,就将其写成一个布局部件,便于各个 UI 重用。
- 使用 merge 标签减少布局的嵌套层次
- 去掉多余的不可见背景。有多层背景颜色的布局,只留最上层的对用户可见的颜色即可,其他用户不可见的底层颜色可以去掉,减少无效的绘制操作;
- 尽量避免使用 layout_weight 属性。使用包含 layout_weight 属性的线性布局 LinearLayout 每一个子组件都需要被测量两次,会消耗过多的系统资源。在使用 ListView 标签与 GridView 标签的时候,这个问题显的尤其重要,因为子组件会重复被创建。平分布局可以使用相对布局 RelativeLayout 里一个 0dp 的 view 做分割线来搞定,如果不行,那就……;
- 合理的界面的布局结构应是宽而浅,而不是窄而深;
LinearLayout 在有weight属性时,为什么是可能会导致 2次measure ?
分析源码发现,并不是所有的layout_weight都会导致两次measure:
Vertical模式下,child设置了weight(height=0,weight > 0)时将会跳过这一次Measure,之后会再一次Measure
//Vertical模式下,child设置(height=0,weight > 0)时将会跳过这一次Measure,之后会再一次Measure
if (heightMode == MeasureSpec.EXACTLY && lp.height == 0 && lp.weight > 0) {
// Optimization: don't bother measuring children who are going to use
// leftover space. These views will get measured again down below if
// there is any leftover space.
final int totalLength = mTotalLength;
mTotalLength = Math.max(totalLength, totalLength + lp.topMargin + lp.bottomMargin);
skippedMeasure = true;//跳过这一次measure
}
2 优化处理逻辑
- 按需载入视图。某些不怎么重用的耗资源视图,可以等到需要的时候再加载,提高UI渲染速度;
- 使用 ViewStub 标签来加载一些不常用的布局;
- 动态地 inflation view 性能要比用 ViewStub 标签的 setVisiblity 性能要好,当然某些功能的实现采用 ViewStub 标签更合适;
- 尽量避免不必要的耗资源操作,节省宝贵的运算时间;
- 避免在 UI 线程进行繁重的操作。耗资源的操作(比如 IO 操作、网络操作、SQL 操作、列表刷新等)耗资源的操作应用后台进程去实现,不能占用 UI 线程,UI 线程是主线程,主线程是保持程序流畅的关键,应该只操作那些核心的 UI 操作,比如处理视图的属性和绘制;
- 最小化唤醒机制。我们常用广播来接收那些期望响应的消息和事件,但过多的响应超过本身需求的话,会消耗多余的 Android 设备性能和资源。所以应该最小化唤醒机制,当应用不关心这些消失和事件时,就关闭广播,并慎重选择那些要响应的 Intent 。
- 为低端设备考虑,比如 512M 内存、双核 CPU 、低分辨率,确保你的应用可以满足不同水平的设备。
- 优化应用的启动速度。当应用启动一个应用时,界面的尽快反馈显示可以给用户一个良好的体验。为了启动更快,可以延迟加载一些 UI 以及避免在应用 Application 层级初始化代码。
3 善用 DEBUG 工具
- 多使用Android提供的一些调试工具去追踪应用主要功能的性能情况;
- 多使用Android提供的一些调试工具去追踪应用主要功能的内存分配情况<file_sep>/source/version.gradle
android {
    // Versions shared across modules are declared once in the root project's
    // build.gradle (ext block) so every module builds against the same SDK.
    compileSdkVersion rootProject.ext.compileSdkVersion
    // NOTE(review): buildToolsVersion is still hard-coded here; presumably it
    // should also come from rootProject.ext for consistency — confirm.
    buildToolsVersion "26.0.1"
    defaultConfig {
        applicationId "com.wangpos.test"
        minSdkVersion rootProject.ext.minSdkVersion
        targetSdkVersion rootProject.ext.targetSdkVersion
        versionCode rootProject.ext.versionCode
        versionName "1.0"
    }
}
<file_sep>/source/genJavadoc.gradle
// Generates Javadoc into app/build/docs/javadoc/; index.html is the entry
// point and can be opened directly in a browser.
task javadoc(type: Javadoc,dependsOn: 'build') {
    version "1.0.1"
    // Force UTF-8 so non-ASCII source comments render correctly.
    options.addStringOption("charset", "UTF-8")
    options.encoding = "UTF-8"
    title = "MySDKII"
    // Only document the public SDK interface package.
    include(
            "com/wangpos/test/inter/*.java",
    )
    source = android.sourceSets.main.java.srcDirs
    // Don't fail the build on Javadoc errors (e.g. unresolved Android refs).
    failOnError false
}
apply plugin: 'com.android.application'
// Shared build logic is split into standalone script files and applied here.
apply from:'../source/initConfig.gradle'
apply from:'../source/initServiceLoader.gradle'
apply from:'../source/android.gradle'

// Build-script classpath: resolves the locally published custom Gradle plugin
// from the ../repo maven directory, falling back to jcenter.
buildscript {
    repositories {
        maven {
            url uri('../repo')
        }
        jcenter()
    }
    dependencies {
        classpath group: 'com.micky',
                name: 'mygradleplugin',
                version: '1.0.1'
    }
}
apply plugin: 'com.micky.mycustom'

// './gradlew assembleRelease' builds every flavor; the bytecode-rewriting
// transform plugin fails on multi-flavor builds, so it is only applied when
// packaging a single variant that needs the bytecode modification.
//apply plugin: 'com.micky.myTransform'

dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
        exclude group: 'com.android.support', module: 'support-annotations'
    })
    compile 'com.android.support:appcompat-v7:25.0.0'
    testCompile 'junit:junit:4.12'
    implementation "android.arch.work:work-runtime:1.0.0-alpha01"
}
android {
    buildToolsVersion '26.0.2'
}
android{
    // Release-build optimization settings.
    // Fix: the original block was named "buildType"; the Android Gradle DSL
    // block is "buildTypes", so the script would fail when applied.
    buildTypes{
        release{
            // Strip resources that are never referenced (release builds only).
            shrinkResources true
            // zipalign the APK so the OS can index resources more efficiently.
            zipAlignEnabled true
            // Enable ProGuard; with no rules file supplied the default one is
            // used (SDK/tools/proguard/proguard-android.txt).
            minifyEnabled true
            // Default Android rules plus this module's own rules file.
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
            // Generate pseudo-locales for localization testing.
            pseudoLocalesEnabled true
            // Release builds are not debuggable.
            debuggable false
            // JNI debugging disabled as well.
            jniDebuggable false
            signingConfig signingConfigs.release
        }
    }
}
<file_sep>/source/android.gradle
// Generates Javadoc for the public SDK interface package; depends on 'build'
// so compiled sources are available first.
task javadoc(type: Javadoc,dependsOn: 'build') {
    version "1.0.1"
    // Force UTF-8 so non-ASCII source comments render correctly.
    options.addStringOption("charset", "UTF-8")
    options.encoding = "UTF-8"
    title = "MySDKII"
    include(
            "com/wangpos/test/inter/*.java",
    )
    source = android.sourceSets.main.java.srcDirs
    // Don't fail the build on Javadoc errors (e.g. unresolved Android refs).
    failOnError false
}
// Command-line overridable properties, e.g.:
//   ./gradlew assemble -Pcustom=foo -PTestName=bar
def custom = project.hasProperty('custom') ? custom : 'defaultCustom'
def testName = project.hasProperty('TestName') ? TestName : 'defaultName'
println(custom)
println(testName)
/** This configuration closure runs automatically; scripts execute top-down. */
task initTask {
    println('***初始化模块***')
}
/** '<<' adds a doLast action, so this only runs when invoked: ./gradlew releaseJar */
task releaseJar << {
    println('***创建jar包***')
}
// A Copy-typed task: its copy action only runs when invoked explicitly.
// The printlns below execute at configuration time, not when the copy runs.
task copyDocs(type: Copy) {
    println("copyDoc开始执行...")
    from 'src/main/doc'
    into 'build/target/doc'
    println("copyDocs执行完...")
}
// Packs compiled classes into a jar; depends on 'javadoc' so the docs are
// generated first.
task makeSdkJar(type: org.gradle.api.tasks.bundling.Jar, dependsOn: 'javadoc') {
    baseName 'TestSDK'
    // Pack only selected class files from the obm/debug compile output.
    from('build/intermediates/classes/obm/debug') {
        // 'include' whitelists specific files...
        include 'com/wangpos/test/MainActivity.class'
        //
        include 'com/wangpos/test/Presenter.class'
    }
    // into('org/cmdmac/')
    // ...while 'exclude' filters files out again: Presenter is excluded even
    // though it was included above, so effectively only MainActivity is packed.
    exclude('com/wangpos/test/Presenter.class')
    // exclude{ it.name.startsWith('R$');}
}
// Main Android configuration block, shared between modules via `apply from`.
android {
    // SDK versions come from rootProject.ext / gradle.properties so all
    // modules and developers agree on them.
    compileSdkVersion rootProject.ext.compileSdkVersion
    buildToolsVersion localBuildToolsVersion
    defaultConfig {
        applicationId "com.wangpos.test"
        minSdkVersion rootProject.ext.minSdkVersion
        targetSdkVersion rootProject.ext.targetSdkVersion
        versionCode 2
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        flavorDimensions "versionCode"
    }
    signingConfigs {
        keystore {
            keyAlias 'androiddebugkey'
            keyPassword 'android'
            // NOTE(review): absolute, machine-specific keystore path — other
            // developers will need their own; consider a relative path.
            storeFile file('/Users/qiyue/wangpos/code2/keystore/debug.keystore')
            storePassword '<PASSWORD>'
        }
    }
    buildTypes {
        release {
            signingConfig signingConfigs.keystore
            minifyEnabled false
            // manifestPlaceholders substitute ${app_name} in AndroidManifest.xml.
            manifestPlaceholders = ["app_name": 'Gradle详解正式版']
            buildConfigField "boolean", "isDebug", "false"
        }
        debug {
            // Distinct applicationId suffix lets debug and release builds be
            // installed side by side on one device.
            applicationIdSuffix ".debug"
            manifestPlaceholders = ["app_name": 'Gradle详解测试版']
            buildConfigField "boolean", "isDebug", "true"
        }
        other {
            applicationIdSuffix ".other"
            manifestPlaceholders = ["app_name": 'Gradle普通版本']
            buildConfigField "boolean", "isDebug", "false"
        }
    }
    // Three flavors x three build types = nine build variants; each flavor
    // gets its own BuildConfig.VERSION string constant.
    productFlavors {
        obm {
            buildConfigField "String", "VERSION", "\"obm\""
        }
        oem {
            buildConfigField "String", "VERSION", "\"oem\""
        }
        odm {
            buildConfigField "String", "VERSION", "\"odm\""
        }
    }
    lintOptions {
        // Run lint when building release variants.
        checkReleaseBuilds true
        // Continue the build even when lint reports errors.
        abortOnError false
    }
    dexOptions {
        // Larger heap for the dex step to avoid GC-overhead OOM errors.
        javaMaxHeapSize "4g"
    }
    testOptions {
        resultsDir = "${project.buildDir}/foo/results"
    }
    // After project evaluation, wrap copyDocs with extra first/last actions.
    project.afterEvaluate {
        tasks.getByName("copyDocs") {
            it.doFirst {
                println("执行配置111")
            }
            it.doLast {
                println("执行配置222")
            }
        }
    }
    project.assemble.doLast {
        println "assemble finish"
    }
    project.assemble.doFirst {
        println "assemble start"
    }
}
/*
* compileSdkVersion 26 //告诉 Gradle 用哪个 Android SDK 版本编译你的应用,当你修改了 compileSdkVersion 的时候,
* 可能会出现新的编译警告、编译错误,但新的 compileSdkVersion 不会被包含到 APK 中:它纯粹只是在编译的时候使用。
* compileSdkVersion 通常我们使用最新的,在编译的时候检查代码的错误和警告,提示开发者修改和优化
* */
/*
buildToolsVersion "26.0.1" 表示构建工具的版本号,这个属性值对应 AndroidSDK 中的 Android SDK Build-tools,
正常情况下 build.gradle 中的 buildToolsVersion 跟你电脑中 Android SDK Build-tools 的最新版本是一致的
*/
/*
minSdkVersion 15 //应用可以运行的最低要求,app运行所需的最低sdk版本.低于minSdkVersion的手机将无法安装.
*/
/*
targetSdkVersion 26
minSdkVersion和targetSdkVersion相信非常多人都不太理解。我在网上也看了很多关于这两者差别的文章,感觉说的都非常模糊。直到我在stackOverFlow看到Android Min SDK Version vs. Target SDK Version这篇文章后,我才最终弄清楚怎样去设置minSdkVersion和targetSdkVersion。如今我将分享给大家。
简言之,这篇文章的目的是为了区分minSDK和targetSDK,这两者相当于一个区间。你能够用到targetSDK中最新的API和最酷的新功能,但你又不得不向下兼容到minSDK,保证这个区间内的设备都能够正常的执行你的app。换句话说,你想使用Android刚刚推出的新特性。但这对于你的app又不是必须的。你就能够将targetSDK设置为你想使用新特性的SDK版本号,minSDK设置成低版本号保证全部人都能够使用你的app。
*/
/*
defaultConfig 配置默认构建属性
*/
/*
applicationId 指定应用唯一标识,相同的applicationId不能安装到同一台设备
adb uninstall 这里是applicationId
adb uninstall com.wangpos.test
adb uninstall com.wangpos.test.debug
versionCode 应用版本号
versionName 应用版本名称
packages 是改变会影响资源文件的应用,R文件生成路径
*/
/*
* BuildType 构建类型
*
* 默认情况下,Android Plugin 会自动给项目构建 debug 和 release 版本。两个版本的区别在于能否在安全设备
* (非 dev)上调试,以及 APK 如何签名。debug 使用通过通用的 name/password 对生成的密钥证书进行签名
* (为了防止在构建过程中出现认证请求)。release 在构建过程中不进行签名,需要自行签名。
*
* 我们在开开发过程可以自己添加很多调试版本,通过Android Studio左下角Build Variants 选择当前的调试版本
*
*
*/
/*
buildscript 声明是gradle脚本自身需要使用的资源。可以声明的资源包括依赖项、第三方插件、maven仓库地址等
buildscript和allprojects的作用和区别
buildscript中的声明是gradle脚本自身需要使用的资源,就是说他是管家自己需要的资源,跟你这个大少爷其实并没有什么关系。
而allprojects声明的却是你所有module所需要使用的资源,就是说如果大少爷你的每个module都需要用同一个第三库的时候,你可以在allprojects里面声明
*/
/*
productFlavors 多渠道打包, 和BuildScript组合使用,可以构建多总不同的版本
如果productFlavors3种产品,buildScript中两种模式,最终会有3 X 2 = 6种版本类型
*/
/*
repositories
顾名思义就是仓库的意思啦,而jcenter()、maven()和google()就是托管第三方插件的平台
dependencies
当然配置了仓库还不够,我们还需要在dependencies{}里面的配置里,把需要配置的依赖用classpath配置上,因为这个dependencies在buildscript{}里面,
所以代表的是Gradle需要的插件。
*/
/*
manifestPlaceholders
占位符,我们可以通过它动态配置AndroidManifest文件一些内容,譬如app的名字
*/
/*
OBM:A设计,A生产,A品牌,A销售==工厂自己设计自产自销
ODM:B设计,B生产,A品牌,A销售==俗称“贴牌”,就是工厂的产品,别人的品牌
OEM:A设计,B生产,A品牌,A销售==代工,代生产,别人的技术和品牌,工厂只生产
*/
/*
dexOptions{}
我们知道,Android中的Java源代码被编译成class字节码后,在打包成apk的时候
被dx命令优化成Android虚拟机可执行的DEX文件。
DEX文件比较紧凑,Android费尽心思做了这个DEX格式,就是为了能使我们的程序在Android中平台上运行快一些。对于这些生成DEX文件的过程和处理,Android Gradle插件都帮我们处理好了,Android Gradle插件会调用SDK中的dx命令进行处理。
但是有的时候可能会遇到提示内存不足的错误,大致提示异常是
java.lang.OutOfMemoryError: GC overhead limit exceeded,为什么会提示内存不足呢?
其实这个dx命令只是一个脚本,它调用的还是Java编写的dx.jar库,是Java程序处理的,所以当内存不足的时候,我们会看到这个Java异常信息.默认情况下给dx分配的内存是一个G8,也就是 1024MB。
所以我们只需要把内存设置大一点,就可以解决这个问题,上图我的项目就把内存设置为4g。
*/
/*
dependencies{}
我们平时用的最多的大概就这个了,
1. 首先第一句compile fileTree(include: ['.jar'], dir: 'libs')*,这样配置之后本地libs文件夹下的扩展名为jar的都会被依赖,非常方便。
2. 如果你要引入某个本地module的话,那么需要用compile project('×××')。
3. 如果要引入网上仓库里面的依赖,我们需要这样写compile group:'com.squareup.okhttp3',name:'okhttp',version:'3.0.1',当然这样是最完整的版本,缩写就把group、name、version去掉,然后以":"分割即可。
compile 'com.squareup.okhttp3:okhttp:3.0.1'
*/
/*
gradle 3.0中依赖implementation、api的区别:
其实api跟以前的compile没什么区别,将compile全部改成api是不会错的;
而implementation指令依赖是不会传递的,也就是说当前引用的第三方库仅限于本module内使用,其他module需要重新添加依赖才能用,下面用两个图说明:
*/<file_sep>/source/16ms_do.md
### 16ms对于系统干了那些事
- 1.手机屏幕是由许多的像素点组成的,每个像素点通过显示不同的颜色最终屏幕呈现各种各样的图像.
- 2.像素从哪里来?
在GPU中有一块缓冲区叫做 Frame Buffer ,这个帧缓冲区可以认为是存储像素值的二位数组,
数组中的每一个值就对应了手机屏幕的像素点需要显示的颜色.
由于这个帧缓冲区的数值是在不断变化的,所以只要完成对屏幕的刷新就可以显示不同的图像了.
至于刷新工作手机的逻辑电路会定期的刷新 Frame Buffer的 目前主流的刷新频率为60次/秒
折算出来就是16ms刷新一次
- 3.Frame Buffer 中的数据怎么来
1)GPU 除了帧缓冲区 用以交给手机屏幕进行绘制外. 还有一个缓冲区 Back Buffer 这个用以交给应用的,让你往里面填充数据.
GPU会定期交换 Back Buffer 和 Frame Buffer 也就是让Back Buffer中的数据转到 Frame Buffer 然后交给屏幕进行显示绘制,
同时让原先的Frame Buffer 变成 Back Buffer 让程序处理.
2)这也就是贯穿整个安卓系统的双缓冲机制,实际上的帧就保存在这两个缓冲区,
A缓冲区用来显示当前帧,B缓冲区就用来缓存或者成为处理下一帧数据,
这样就可以做到一边处理一边显示,双缓冲主要加快栅格化速度
- 4.如何发生丢帧现象的
Frame Buffer和Back Buffer的数据是自动完成交换的,我们需要处理的是根据需求向 Back Buffer中填充数据即可,
那么在向Back Buffer填充数据期间此时如果需要两者交换处理怎么办呢?
由于在填充Back Buffer数据时 系统会将 Back Buffer 锁上,因此此时这次数据交换就会被放弃,
这样手机还是显示原来的内容?等到下次(再隔一个16ms)完成交换再显示新的图像时一共需要的时间是32ms,这就是丢帧.如果是严重丢帧就是需要等待更多的16ms
- 5.从XML到显示屏幕的过程
1)绝大多数渲染操作都依赖两个硬件: CPU 、 GPU 。 CPU 负责 Measure 、 layout 、 Record 、 Execute 的计算操作,
GPU 负责栅格化( Rasterization )操作。
2)CPU会将ui组件计算成多边形(polygons)和纹理(textures),然后交给GPU进行栅格化渲染,
最后GPU将数据传给屏幕进行绘制显示.当然在CPU通过OpenGL ES API处理传递给 GPU之间还需要经过
我们需要在 16 ms 内处理完所有 CPU 和 GPU 的计算、绘制、渲染等操作,才能获得应用的流畅体验。
- 6.避免帧丢失(为了避免帧丢失,我们就要确保更新时间不要超过16ms)
- 尽可能的减少布局的嵌套 避免无用的布局
- 对于自定义view 减少不必要的invalidate的调用
- 去除view中不必要的background, 比如ImageView,假如它显示的图片填满了它的空间,就没必要给它设置背景色
**在向Back Buffer填充数据时** Back Buffer 会被锁定,然后会将一个指向它的 Canvas 对象交给程序.
应用程序收到这个对象后按照视图层次从上往下遍历. 在view 的onDraw(Canvas canvas)方法中就是这个对象,
减少视图层可以减少canvas的传递时间
invalidate 方法会使视图重绘,所以减少该方法的使用同时合理的使用该方法也会降低丢帧率.
#### 整个16ms并不是完全让我绘制界面的,还有layout,measue等方法的调用呢,所以留给用于绘制自己的界面的时间肯定是少于16ms的<file_sep>/source/memory_optimization.md
### 内存泄露
<img width="480" height="240" src="https://github.com/UCodeUStory/GradlePlugin/blob/master/source/gc1.jpeg"/>
<img width="480" height="240" src="https://github.com/UCodeUStory/GradlePlugin/blob/master/source/gc2.jpeg"/>
- 内存泄漏指的是那些程序不再使用的对象无法被GC识别。
内存泄露是内存溢出OOM的重要原因之一,会导致Crash
- 导致频繁GC,可能是内存抖动,或者瞬间产生大量对象。
**GC期间所有线程将暂停,GC所占用的时间和它是哪一个Generation也有关系,执行时间的长短也和当前Generation中的对象数量有关。
- 导致GC频繁执行有两个原因:
Memory Churn内存抖动,内存抖动是因为大量的对象被创建又在短时间内马上被释放。
瞬间产生大量的对象会严重占用Young Generation(新生的)的内存区域,当达到阀值,剩余空间不够的时候,也会触发GC。即使每次分配的对象占用了很少的内存,但是他们叠加在一起会增加Heap的压力,从而触发更多其他类型的GC。这个操作有可能会影响到帧率,并使得用户感知到性能问题。
例如,你需要避免在for循环里面分配对象占用内存,需要尝试把对象的创建移到循环体之外,自定义View中的onDraw方法也需要引起注意,每次屏幕发生绘制以及动画执行过程中,onDraw方法都会被调用到,避免在onDraw方法里面执行复杂的操作,避免创建对象。对于那些无法避免需要创建对象的情况,我们可以考虑对象池模型,通过对象池来解决频繁创建与销毁的问题,但是这里需要注意结束使用之后,需要手动释放对象池中的对象。
- 解决内存抖动
用对象池技术有很多好处,它可以避免内存抖动,提升性能,但是在使用的时候有一些内容是需要特别注意的。通常情况下,初始化的对象池里面都是空白的,当使用某个对象的时候先去对象池查询是否存在,如果不存在则创建这个对象然后加入对象池,但是我们也可以在程序刚启动的时候就事先为对象池填充一些即将要使用到的数据,这样可以在需要使用到这些对象的时候提供更快的首次加载速度,这种行为就叫做预分配。使用对象池也有不好的一面,程序员需要手动管理这些对象的分配与释放,所以我们需要慎重地使用这项技术,避免发生对象的内存泄漏。为了确保所有的对象能够正确被释放,我们需要保证加入对象池的对象和其他外部对象没有互相引用的关系。
#### 检测工具
- Leaks
傻瓜式的内存检测工具,但是非常好用
- 当然我们可以用AS Monitor+MAT来自己分析内存泄漏原因
#### 那么都有哪些资源是GC Roots呢?
1.Class 由System Class Loader/Boot Class Loader加载的类,这些类不会被回收。注意是类不会被回收,实例还是会被回收的,但是不依赖实例的静态static变量是依赖类的,因此很多内存泄露都是因为被静态变量引用导致的。
2. Thread 线程,激活状态的线程;
3. Stack Local 栈中的对象。每个线程都会分配一个栈,栈中的局部变量或者参数都是GC root,因为它们的引用随时可能被用到;
4. JNI Local JNI中的局部变量和参数引用的对象;可能在JNI中定义的,也可能在虚拟机中定义
5. JNI Global JNI中的全局变量引用的对象;同上
6. Monitor Used 用于保证同步的对象,例如wait(),notify()中使用的对象、锁等。
7. Held by JVM JVM持有的对象。JVM为了特殊用途保留的对象,它与JVM的具体实现有关。比如有System Class Loader, 一些Exceptions对象,和一些其它的Class Loader。对于这些类,JVM也没有过多的信息。
也就是说所有的内存泄漏问题从根本上都是因为被这些GC Root引用着导致的
#### 常见问题
- 非静态内部类,匿名内部类(由于原因1,handler)
- Thread(由于原因2)
- ContentObserver,File,Cursor,Stream,Bitmap等资源未关闭(由于原因3)
- Webview 内存泄露
1.可以将 Webview 的 Activity 新起一个进程,结束的时候直接System.exit(0);退出当前进程;
启动新进程,主要代码: AndroidManifest.xml 配置文件代码如下
<activity
android:name=".ui.activity.Html5Activity"
android:process=":lyl.boon.process.web">
<intent-filter>
<action android:name="com.lyl.boon.ui.activity.htmlactivity"/>
<category android:name="android.intent.category.DEFAULT"/>
</intent-filter>
</activity>
在新进程中启动 Activity ,里面传了 一个 Url:
Intent intent = new Intent("com.lyl.boon.ui.activity.htmlactivity");
Bundle bundle = new Bundle();
bundle.putString("url", gankDataEntity.getUrl());
intent.putExtra("bundle",bundle);
startActivity(intent);
然后在 Html5Activity 的onDestory() 最后加上 System.exit(0); 杀死当前进程。
2.不能在xml中定义 Webview ,而是在需要的时候创建,并且Context使用 getApplicationgContext(),如下代码:
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
mWebView = new WebView(getApplicationContext());
mWebView.setLayoutParams(params);
mLayout.addView(mWebView);
3.在 Activity 销毁的时候,可以先让 WebView 加载null内容,然后移除 WebView,再销毁 WebView,最后置空。
代码如下:
@Override
protected void onDestroy() {
if (mWebView != null) {
mWebView.loadDataWithBaseURL(null, "", "text/html", "utf-8", null);
mWebView.clearHistory();
((ViewGroup) mWebView.getParent()).removeView(mWebView);
mWebView.destroy();
mWebView = null;
}
super.onDestroy();
}
- BraodcastReceiver,EventBus等观察者注册未注销(由于原因1)
- 单例Dialog 一直持有Context(这种依赖Context 不要使用单例)
#### 解决原则
- 内部类静态化,内部类里面的资源及时关闭不要静态化
- 注意线程的及时关闭
- 注意资源的及时关闭
- webView单独开线程(下面有具体的例子)
- 同样需要及时关闭<file_sep>/source/lintOption.gradle
lintOptions {
    // Run lint checks when building release variants.
    checkReleaseBuilds true
    // Continue the build even when lint finds errors.
    abortOnError false
}
// Lint can also be run manually from the project root with './gradlew lint';
// it writes an HTML report per variant under
// app/build/outputs/lint-results-<flavor><buildType>.html, viewable in a browser, e.g.:
// file:///.../app/build/outputs/lint-results-obmDebug.html
<file_sep>/source/diffManifest.md
- 构建过程中配置不同的AndroidManifest.xml
比如遇到一个需求安装一个没有图标的应用,而调试的时候我们是需要打开才方便调试的
这就需要在release时去掉:
<category android:name="android.intent.category.LAUNCHER" />
首先我们可以在gradle.properties 线面添加属性
isDebug=true
在项目的app(有可能这里改了名字)目录build.gradle 配置:
sourceSets {
main {
if (!isDebug.toBoolean()) {//主项目需要在非debug模式下开发
//release
manifest.srcFile 'src/release/AndroidManifest.xml'
} else {
//debug
manifest.srcFile 'src/debug/AndroidManifest.xml'
}
}
}<file_sep>/source/properties.gradle
android {
    // localBuildToolsVersion is declared in gradle.properties (committed to
    // SVN/git) so every developer builds with the same build-tools version.
    buildToolsVersion localBuildToolsVersion
}
/*
在项目根目录配置localGradlePluginVersion属性 解决不同开发人员使用不同gradle问题
dependencies {
classpath 'com.android.tools.build:gradle:'+localGradlePluginVersion
}
*/<file_sep>/source/gradle_tranform.md
### 新增Gradle Transform
监听文件编译结束,通过javasist实现字节码修改,实现代码插入,通过这种插件化的AOP 和代码中使用Aspectj 区别就是,避免代码碎片化,添加一个功能修改多处代码,即使用了Aspectj 也许要在修改的地方添加注解,当修改处很多的时候很不方便,通过transform和javassist 可以遍历整个工程,按照满足条件的一次性修改,并且以后我们可以写成通用性的组建,比如自动注册一些组建在所有Activity,里面Javassist用了反射原理,但是这是编译器,不损失效率,Javassist非常强大,需要仔细学习
1.新建一个MyTransform 再新建一个插件MyPlugin注册这个Transform
Mytransform 重写transform方法
里面要将输入内容复制给输出,否者报错,这是第一步,其实就是相当于在运行我们给拦截了,必须再把内容输出出去才能打包成dex
里面遍历
// Transform的inputs有两种类型,一种是目录,一种是jar包,要分开遍历
inputs.each { TransformInput input ->
//对类型为“文件夹”的input进行遍历
input.directoryInputs.each { DirectoryInput directoryInput ->
//文件夹里面包含的是我们手写的类以及R.class、BuildConfig.class以及R$XXX.class等
/**
* 这里就统一处理一些逻辑,避免代码分散,碎片化
*/
println("transform transformsalkfjdl;kajf#####################*********")
println(directoryInput.file.absolutePath)
MyInject.injectDir(directoryInput.file.absolutePath,"com\\wangpos\\test",project)
// 获取output目录
def dest = outputProvider.getContentLocation(directoryInput.name,
directoryInput.contentTypes, directoryInput.scopes,
Format.DIRECTORY)
// 将input的目录复制到output指定目录
FileUtils.copyDirectory(directoryInput.file, dest)
}
//对类型为jar文件的input进行遍历
input.jarInputs.each { JarInput jarInput ->
//jar文件一般是第三方依赖库jar文件
// 重命名输出文件(同目录copyFile会冲突)
def jarName = jarInput.name
def md5Name = DigestUtils.md5Hex(jarInput.file.getAbsolutePath())
if (jarName.endsWith(".jar")) {
jarName = jarName.substring(0, jarName.length() - 4)
}
//生成输出路径
def dest = outputProvider.getContentLocation(jarName + md5Name,
jarInput.contentTypes, jarInput.scopes, Format.JAR)
//将输入内容复制到输出
FileUtils.copyFile(jarInput.file, dest)
}
}<file_sep>/source/task.md
### Task 用法
- 一个Task代表一个构建工作的原子操作,例如编译calsses或者生成javadoc。
Gradle中,每一个待编译的工程都叫一个Project。每一个Project在构建的时候都包含一系列的Task。比如一个Android APK的编译可能包含:Java源码编译Task、资源编译Task、JNI编译Task、lint检查Task、打包生成APK的Task、签名Task等。插件本身就是包含了若干Task的。
如下就是一个task的简单例子:
//脚本运行的时候就会不调用
task initTask{
println('***初始化模块***')
}
/**在终端手动调用的时候才会执行 ./gradlew releaseJar*/
task releaseJar <<{
println('***创建jar包***')
}
//如果代码没有加<<,则这个任务在脚本initialization的时候执行 ./gradlew releaseJar*/
//这和我们调用task这个函数的方式有关!如果没有<<,则闭包在task函数返回前会执行,而如果加了<<,则变成调用myTask.doLast添加一个Action了,自然它会等到grdle myTask的时候才会执行!
task myTask
task myTask { configure closure } // closure是一个闭包
task myType << { task action } // <<符号是doLast的缩写
task myTask(type: SomeType) // SomeType可以指定任务类型,Gradle本身提供有Copy、Delete、Sync等
task myTask(type: SomeType) { configure closure }
- 一个Task包含若干Action。所以,Task有doFirst和doLast两个函数,用于添加需要最先执行的Action和需要和需要最后执行的Action。Action就是一个闭包。闭包,英文叫Closure,是Groovy中非常重要的一个数据类型或者说一种概念。
- Task创建的时候可以通过 type: SomeType 指定Type,Type其实就是告诉Gradle,这个新建的Task对象会从哪个基类Task派生。比如,Gradle本身提供了一些通用的Task,最常见的有Copy 任务。Copy是Gradle中的一个类。当我们:task myTask(type:Copy)的时候,创建的Task就是一个Copy Task。
- 当我们使用 taskmyTask{ xxx}的时候,花括号就是一个closure。
- 当我们使用taskmyTask << {xxx}的时候,我们创建了一个Task对象,同时把closure做为一个action加到这个Task的action队列中,并且告诉它“最后才执行这个closure”
### Type
####Copy
- 将文件复制到目标目录。此任务在复制时也可以执行重命名和过滤文件操作。它实现了CopySpec接口,使用CopySpec.from()方法可以指定源文件,CopySpec.into()方法可以指定目标目录。
task copyDocs(type: Copy) {
from 'src/main/doc'
into 'build/target/doc'
}
//这是个Ant filter
import org.apache.tools.ant.filters.ReplaceTokens
//这是一个闭包
def dataContent = copySpec {
from 'src/data'
include '*.data'
}
task initConfig(type: Copy) {
from('src/main/config') {
include '**/*.properties'
include '**/*.xml'
filter(ReplaceTokens, tokens: [version: '2.3.1'])
}
from('src/main/config') {
exclude '**/*.properties', '**/*.xml'
}
from('src/main/languages') {
rename 'EN_US_(.*)', '$1'
}
into 'build/target/config'
exclude '**/*.bak'
includeEmptyDirs = false
with dataContent
}
- 替换AndroidManifest文件
task chVer(type: Copy) { // 指定Type为Copy任务
from "src/main/manifest/AndroidManifestCopy.xml" // 复制src/main/manifest/目录下的AndroidManifest.xml
into 'src/main' // 复制到指定目标目录
rename { String fileName -> //在复制时重命名文件
fileName = "AndroidManifest.xml" // 重命名
}
}
- 替换so文件
task chSo(type: Copy) {
from "src/main/jniLibs/test" // 复制test文件夹下的所有so文件
into "src/main/jniLibs/armeabi-v7a" //复制到armeabi-v7a文件夹下
}
这样每次打包APK前执行以上任务就可以自动替换文件啦!
问:那如果有多个任务需要执行是不是要执行多次任务呢?
答:可以通过多任务命令调用一次即可。
gradlew task1 task2 [...]
问:任务名太长不想输入这么多字怎么办?
答:可以采用简化操作,但是必须保证可以唯一区分出该任务的字符,如:
gradlew cV
问:那我不想每次打包前都输入命令怎么办?
答:可以每次build时自动执行自定义任务。
afterEvaluate {
tasks.matching {
// 以process开头以ReleaseJavaRes或DebugJavaRes结尾的task
it.name.startsWith('process') && (it.name.endsWith('ReleaseJavaRes') || it.name.endsWith
('DebugJavaRes'))
}.each { task ->
task.dependsOn(chVer, chSo) // 任务依赖:执行task之前需要执行dependsOn指定的任务
}
}
####Sync
- 此任务与Copy任务类似,唯一的区别是当执行时会复制源文件到目标目录,目标目录中所有非复制文件将会被删除,除非指定Sync.preserve(org.gradle.api.Action)。
例子:
task syncDependencies(type: Sync) {
from 'my/shared/dependencyDir'
into 'build/deps/compile'
}
// 你可以保护目标目录已经存在的文件。匹配的文件将不会被删除。
task sync(type: Sync) {
from 'source'
into 'dest'
preserve {
include 'extraDir/**'
include 'dir1/**'
exclude 'dir1/extra.txt'
}
}
#### Zip
- 创建ZIP归档文件,默认压缩文件类型为zip。
例子:
task zip(type: Zip) {
from 'src/dist'
into('libs')
}
###自定义Task
// 需要继承自DefaultTask
class HelloWorldTask extends DefaultTask {
// @Optional 表示在配置该Task时,message是可选的。
@Optional
String message = 'I am kaku'
// @TaskAction 表示该Task要执行的动作,即在调用该Task时,hello()方法将被执行
@TaskAction
def hello(){
println "hello world $message"
}
}
// hello使用了默认的message值
task hello(type:HelloWorldTask)
// 重新设置了message的值
task helloOne(type:HelloWorldTask){
message ="I am a android developer"
}
<file_sep>/source/day01.gradle
//Gradle是一个基于Apache Ant和Apache Maven概念的项目自动化构建工具。它使用一种基于Groovy的特定领域语言(DSL)来声明项目设置,抛弃了基于XML的各种繁琐配置
//Gradle是一个基于JVM的构建工具,是一款通用灵活的构建工具,支持maven, Ivy仓库,支持传递性依赖管理,而不需要远程仓库或者是pom.xml和ivy.xml配置文件,基于Groovy,build脚本使用Groovy编写
//Gradle为每个build.gradle都会创建一个相应的Project领域对象,在编写Gradle脚本时,我们实际上是在操作诸如Project这样的Gradle领域对象。
//看了上面专业的术语还是有点懵逼,Gradle 就是一个构建工具,帮助我们构建Android程序,最终生成apk。
//DSL是什么?domain specific language 中文领域专用语言
//领域专用语言是什么?其基本思想是“求专不求全”,不像通用目的语言那样目标范围涵盖一切软件问题,而是专门针对某一特定问题的计算机语言。(不明白?读10遍后,还不明白?看下面)
//DSL 已经存在很久了!。其中一个例子,就是 UNIX 系统中就具有 专属于他自己的小型语言。这当中包括丰富的用于排字用的语言(troff,eqn),shell 工具(awk, sed),以及软件开发工具(make,yacc)。
//要构建一种DSL,按最常见的Unix风格的做法,就是先定义它的语法,然后通过代码生成技术把DSL代码转成一种通用语言代码(如C C++ Java等等),或者写一个这种DSL的解释器。Unix有很多工具能让这件事做起来轻松些。我为这类DSL定了一个术语:“外部DSL”。XML配置文件是外部DSL的另一种常见形式。
//DSL也是Lisp和Smalltalk社群的一项重要传统,但方式不同,他们不是动手新造一套语言,而是让Lisp或Smalltalk这种通用目的语言换个颜面变成DSL。(<NAME>的文章《自底向上编程》对此有精彩讲述。)利用编程语言自带的语法结构定义出来的DSL,我称之为“内部DSL”,也叫做“内嵌DSL”。
//DSL主要分为三类:外部DSL、内部DSL,以及语言工作台。
//外部DSL是一种“不同于应用系统主要使用语言”的语言。外部DSL通常采用自定义语法,不过选择其他语言的语法也很常见(XML就是一个常见选择)。宿主应用的代码会采用文本解析技术对使用外部DSL编写的脚本进行解析。一些小语言的传统UNIX就符合这种风格。可能经常会遇到的外部DSL的例子包括:正则表达式、SQL、Awk,以及像Struts和Hibernate这样的系统所使用的XML配置文件。
//内部DSL是一种通用语言的特定用法。用内部DSL写成的脚本是一段合法的程序,但是它具有特定的风格,而且只用到了语言的一部分特性,用于处理整个系统一个小方面的问题。用这种DSL写出的程序有一种自定义语言的风格,与其所使用的宿主语言有所区别。这方面最经典的例子是Lisp。Lisp程序员写程序就是创建和使用DSL。Ruby社区也形成了显著的DSL文化:许多Ruby库都呈现出DSL的风格。特别是,Ruby最著名的框架Rails,经常被认为是一套DSL。
//语言工作台是一个专用的IDE,用于定义和构建DSL。具体来说,语言工作台不仅用来确定DSL的语言结构,而且是人们编写DSL脚本的编辑环境。最终的脚本将编辑环境和语言本身紧密结合在一起。
// 领域专用语言详解解释,包括内部DSL和外部DSL https://martinfowler.com/articles/languageWorkbench.html
//
//DSLs实现模型
//实现DSL,主要是针对特定问题域进行动态状态建模,模型可以是任意的:对象模型,结构化模型或者其他的任何实现模型;程序语言通常会很关注语法以及语义,DSLs中的建模主要就是为了建立问题域的描述语义。
//DSLs实现方法
//代码生成(Code-Generation)和解释运行(Interpretation)是DSL的两种实现方式,前者在编译时处理模型,后者在运行时应用模型。
//从实现的角度来讲,前者更加快捷方便,而后者更加精致有效;短期看代码生成可以很快应用,长期看解释运行更能形成效益。
//内部DSL:通用语言的特定用法。内部DSL通常是一段合法的程序,但是具有特定的风格。而且只用到了语言一部分特性。
//总结
//
// 结论1:DSL分两种 ,一种是内部DSL,一种是外部DSL
// 结论2:外部DSL可以灵活的使用自己喜欢的方式去编写,但你必须还要开发一个翻译器,具体能提供的能力受限制与翻译器的能力,比如常用的遵循DTD标准的XML配置,在java项目中使用需要java提供一个翻译xml的工具(比如IntelliJ 可以帮助我们生成代码也是一种DSL)
// 结论3:内部DSL写成的脚本是一段合法的程序,但是它具有特定的风格,而且只用到了语言的一部分特性,用于处理整个系统一个小方面的问题。用这种DSL写出的程序有一种自定义语言的风格,与其所使用的宿主语言有所区别。
// 结论4:Gradle是一个基于JVM的构建工具,它使用一种基于Groovy的特定领域语言(DSL)来声明项目设置
// 结论5:Groovy 的 闭包、元编程 、MOP特性决定了可以创建各种DSL
<file_sep>/source/productflavor.gradle
-配置多渠道打包productFlavors
//
//1)为什么要多渠道打包?
//
//安卓应用商店(一个商店也叫做一个渠道,如360,baidu,xiaomi)众多,大大小小几百个,我们发布应用之后需要统计各个渠道的用户下载量,所以才有了多渠道打包。
//现在有比较成熟的第三方应用帮我们实现统计功能(比如友盟),统计的本质就是收集用户信息传输到后台,后台生成报表,帮助我们跟踪分析并完善app。通过系统的方法已经可以获取到,
//版本号,版本名称,系统版本,机型,地区等各种信息,唯独应用商店(渠道)的信息我们是没有办法从系统获取到的,所以我们就人为的在apk里面添加渠道信息(其实就用一个字段进行标识,如360,baidu),
//我们只要把这些信息打包到apk文件并将信息传输到后台,后台根据这个标识,可以统计各个渠道的下载量了,并没有多么的高大上。
//
//说了那么多,其实多渠道打包只需要关注两件事情:
//
//
//将渠道信息写入apk文件
//
//将apk中的渠道信息传输到统计后台
//
//
//添加配置,以友盟的方式,传到友盟来统计,他需要UMENG_CHANNEL_VALUE来区分
//
//
android {
    // One flavor per distribution channel (app store). Each flavor injects
    // its channel id into AndroidManifest.xml via the UMENG_CHANNEL_VALUE
    // placeholder, so the analytics backend (Umeng) can tell downloads apart.
    productFlavors {
        xiaomi {
            manifestPlaceholders = [UMENG_CHANNEL_VALUE: "xiaomi"]
        }
        _360 {
            manifestPlaceholders = [UMENG_CHANNEL_VALUE: "_360"]
        }
        baidu {
            manifestPlaceholders = [UMENG_CHANNEL_VALUE: "baidu"]
        }
        wandoujia {
            manifestPlaceholders = [UMENG_CHANNEL_VALUE: "wandoujia"]
        }
    }
}
//
//然后在Android左下角可以通过Build Variants选择构建不同渠道应用
//
//每种渠道都对应这些buildType 在选择buildVarient时候会全部显示出来
//
//通过不同的打包渠道,我们还可以将框架层抽离,比如我们的开发两个app图标和应用名字不一样,应用也不一样,可以通过这种,来实现多个apk
android{
    buildTypes {
        // Release build: signed, no code shrinking, release app name,
        // and an isDebug flag generated into BuildConfig.
        release {
            signingConfig signingConfigs.keystore
            minifyEnabled false
            manifestPlaceholders = ["app_name":'Gradle详解正式版']
            buildConfigField "boolean","isDebug","false"
        }
        // Debug build: distinct applicationId suffix so it can be installed
        // alongside the release build, plus its own app name and flag.
        debug{
            applicationIdSuffix ".debug"
            manifestPlaceholders = ["app_name":'Gradle详解测试版']
            buildConfigField "boolean","isDebug","true"
        }
        // Extra custom build type with its own suffix and app name.
        other{
            applicationIdSuffix ".other"
            manifestPlaceholders = ["app_name":'Gradle普通版本']
            buildConfigField "boolean","isDebug","false"
        }
    }
    // Each flavor writes a VERSION constant into BuildConfig so the code
    // can branch on the variant (obm/oem/odm) at runtime.
    productFlavors{
        obm{
            buildConfigField "String","VERSION","\"obm\""
        }
        oem{
            buildConfigField "String","VERSION","\"oem\""
        }
        odm{
            buildConfigField "String","VERSION","\"odm\""
        }
    }
}
<file_sep>/source/configManifest.gradle
//
//
//通过${name},使得你可以在你的Manifest插入一个占位符。看下面的例子:
//
//<activity android:name=".Main">
//<intent-filter>
//<action android:name="${applicationId}.foo">
//</action>
// </intent-filter>
//</activity>
//
// 通过上面的代码,${applicationId}会被替换成真实的applicationId,例如对于branchOne这个variant,它会变成:
//
// <action android:name="com.example.branchOne.foo">
//
//
//这是非常有用的,因为我们要根据variant用不同的applicationId填充Manifest.
//
//
//如果你想创建自己的占位符,你可以在manifestPlaceholders定义,语法是:
//
// productFlavors {
// branchOne {
// manifestPlaceholders = [branchCustoName :"defaultName"]
// }
// branchTwo {
// manifestPlaceholders = [branchCustoName :"otherName"]
// }
// }
//
<file_sep>/README.md
## Gradle插件开发介绍
- [英文文档](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/english.md)
#### Gradle基础详解:
这一次一定要系统掌握,你准备好了吗?
- [初识Gradle 和 领域专用语言](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day01.gradle)
- [Gradle 版本配置](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day02.md)
- [Gradle 模块配置](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day03.gradle)
- [Gradle 插件分类](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/day04.gradle)
- [Gradle Android插件包含的内容](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android.gradle)
- [CompileSdkVersion minSdkVersion targetSdkVersion buildToolsVersion区别](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/sdkVersionType.md)
- [Gradle 统一配置你的版本号](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/version.gradle)
- [Gradle 分渠道打包](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/productflavor.gradle)
- [Gradle 配置你的AndroidManifest](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/configManifest.gradle)
- [Gradle 指定你的源码路径、动态去除不需要打包的类·优](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/sourceSet.gradle)
- [Gradle 项目依赖配置](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/project_library.md)
- [Gradle lintOption·优](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/lintOption.gradle)
- [lint报告](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/lint-results-obmDebug.html)
- [Gradle 打包优化配置·优](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/optimization.gradle)
- [Gradle gradle.properties 配置gradle版本和buildTools版本,和一些不便的版本](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/properties.gradle)
- [Gradle 使用variantFilter修改生成apk路径、名字](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/applicationVariant.gradle)
- [Gradle 指定java版本](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/set_java_version.gradle)
- [Gradle packagingOptions解决重复包和文件](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/packageOption.gradle)
- [AndroidStudio常见问题](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android_studio.xml)
- [Gradle 命令打包apk](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/assemble.md)
- [Gradle 命令行传递参数](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/assembleWithParams.md)
- [Gradle 编译器动态生成java·优](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/operate_file.md)
- [Gradle 创建Task](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/task.md)
- [Gradle 打包选择不同的AndroidManifest.xml](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/diffManifest.md)
- [Gradle 执行顺序](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/exeRank.md)
- Gradle 生成测试报告
- [Gradle 生成接口文档](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/genJavadoc.gradle)
- [AAR 生成](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/aar.md)
- [jar 生成](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/makeJar.md)
- [元编程](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/metaprogramming.md)
- 查看所有tasks命令 *./gradlew tasks --all*
#### Gradle高级插件开发
- [插件开发详细步骤](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/plugin_develop.md)
- [Gradle Transform监听文件编译结束](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/gradle_tranform.md)
#### Android性能优化
- [apk瘦身优化](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/android_apk_optimization.md)
- [界面性能UI](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/ui_optimization.md)
- [内存泄露](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/memory_optimization.md)
- [WorkManager](https://github.com/UCodeUStory/GradlePlugin/blob/master/source/workmanager.md)
### 问题总结
- 1.找不到依赖库,需要在repositories中添加jcenter()
- 2.javassist找不到jar包,就是需要javassist引入jar包
- 3.发现生成的apk没有变化,删除了build目录重新build,仍然无变化,点击Android Studio setting 清理缓存,重新启动
- 4.项目app修改名字报错时提示找不到项目,一般根目录.idea就可以解决
- 5.解决Error:All flavors must now belong to a named flavor dimension.
flavorDimensions "versionCode"
- 6.Android Studio clean 时产生 Error:Execution failed for task ':app:mockableAndroidJar' > java.lang.NullPointer
解决1. 这个问题由于更改主项目complieSdk版本导致的,只需要将所有子项目的版本更改相同即可;
解决2. 也可以通过在
3. Press “OK” and try to Rebuild Project again.
解决3.File -> Settings -> Build, Execution, Deployment -> Build Tools -> Gradle -> Experimental
取消 Enable All test..勾选,但是mac版本没找到这个选项
解决4. 在根目录添加
gradle.taskGraph.whenReady {
tasks.each { task ->
if (task.name.equals('mockableAndroidJar')) {
task.enabled = false
}
}
}
- 7.当我们修改 compile 'com.android.support:appcompat-v7:25.0.0'版本时,会报很多value
主题找不到等错误
此时我们只需要修改compileSDK版本和这个V7后面版本一致即可
- 8.2018/8/1遇到问题 修改项目的app为其他名字时总是报找不到app in root project 经过好多天查找最终发现了问题所在,原来是
1. Go to File -> Settings -> Build, Execution, Deployment -> Compiler 2. Add to “Command-line Options”: 这里面全部去掉就可以了
#### 友情链接
[fly803/BaseProject](https://github.com/fly803/BaseProject)
<file_sep>/source/android_performance_optimization.md
###Android性能优化
####性能优化之界面性能优化
<file_sep>/source/metaprogramming.md
###元编程
元编程(Metaprogramming)是指某类计算机程序的编写,这类计算机程序编写或者操纵其他程序(或者自身)作为它们的数据,或者在运行时完成部分本应在编译时完成的工作。
很多情况下与手工编写全部代码相比工作效率更高。编写元程序的语言称之为元语言,被操作的语言称之为目标语言。一门语言同时也是自身的元语言的能力称之为反射。<file_sep>/source/aar.md
### aar生成
android Module项目Build后在build/outputs/aar/目录
android{
    // Redirect the default source locations of the main source set.
    sourceSets {
        main {
            java {
                srcDir 'src/java' // Java sources live here instead of src/main/java
            }
            resources {
                srcDir 'src/resources' // resource files directory
            }
        }
    }
//1. Converting an Eclipse project layout to an Android Studio project:
// sourceSets {
// main {
// manifest.srcFile 'AndroidManifest.xml'
// java.srcDirs = ['src']
// aidl.srcDirs = ['src']
// renderscript.srcDirs = ['src']
// res.srcDirs = ['res']
// assets.srcDirs = ['assets']
// }
//
// instrumentTest.setRoot('tests')
// }
}
//2.我们可以在sourceSets中配置指定的Manifest文件(例如在组件化开发中, 我们需要针对debug与release模式下, 指定不同的Manifest文件)
// Pick a different AndroidManifest.xml depending on the isDebug property
// (useful in componentized builds where debug modules run standalone).
sourceSets {
    main {
        if (isDebug.toBoolean()) {
            manifest.srcFile 'src/main/debug/AndroidManifest.xml'
        } else {
            manifest.srcFile 'src/main/release/AndroidManifest.xml'
        }
    }
}
//3.如果我想在打包的时候, 不包含某些文件, 可以如下进行设置
// Exclude specific files/directories from the packaged output.
sourceSets {
    main {
        java {
            exclude '/test/**' // path of sources to leave out of the build
        }
        resources {
            exclude '/resource/**' // path of resources to leave out
        }
        // .....
    }
}
###项目依赖
- 对于项目依赖android library的话,在这里需要使用gradle mulit project机制。 Mulit project设置是gradle约定的一种格式,如果需要编译某个项目之前,要先编译另外一个项目的时候,就需要用到。结构如下(来自于官方文档):
MyProject/
| settings.gradle
+ app/
| build.gradle
+ libraries/
+ lib1/
| build.gradle
+ lib2/
| build.gradle
- 需要在workplace目录下面创建settings.gradle 的文件,然后在里面写上:
include ':app', ':libraries:lib1', ':libraries:lib2'<file_sep>/source/operate_file.md
###Gradle 操作文件
#### 应用场景
当我们在开发一个SDK的时候,针对不同的用户会提供不同的接口,在不修改代码的情况下,就是通过
gradle配置过滤一些class文件,通常我的接口都会在统一初始化入口初始化配置,这个初始化配置,如果
我们只是单独去掉我们的接口class文件,显然初始化配置哪里会报错,NotFound,所以在初始化的地方我们也需要动态配置
这就需要这个初始化配置我们单独写一个类,而这个类我们用Gradle来根据需求生成
(当我们学习一个技术的时候如果应用不那就没有必要去学)
- 生成我们自己的配置,首先需要创建一个默认BuildConfigDefault类,具有默认配置
def replaceFirstIfTaged(str,tag,src,dest) {
if (str.contains(tag)) {
println "** ${tag} = ${dest};"
return str.replaceFirst(src, "${dest}")
}
return str
}
def replaceInt(key,src,value) {
def configFile = "${projectDir}/src/main/java/config/BuildConfigDefault.java"
def encoding = "UTF-8"
def lines = []
new File(configFile).newReader(encoding).eachLine { line ->
lines.add( replaceFirstIfTaged(line,key,src,value) )
}
BufferedWriter writer = new File(configFile).newWriter(encoding)
lines.each{ line -> writer.writeLine(line) }
writer.close()
}
/**
* 替换整数
* @param key
* @param value
* @return
*/
def initConfigInt(key,value) {
replaceInt(key,/\d+/,value)
}
//替换代码中字符串变量的值
def replaceStringMemberValue(str,tag,dest) {
if (str.contains(tag)) {
println "** ${tag} = ${dest};"
str=str.substring(0,str.indexOf("\"")+1);
str=str+dest+"\";";
return str
}
return str
}
def initConfigString(key,value) {
println("'projectDir="+projectDir)///Users/qiyue/GitProject/GradlePluginWS/GradlePlugin/app
def configFile = "${projectDir}/src/main/java/config/BuildConfigDefault.java"
def encoding = "UTF-8"
def lines = []
new File(configFile).newReader(encoding).eachLine { line ->
lines.add(replaceStringMemberValue(line,key,value))
}
BufferedWriter writer = new File(configFile).newWriter(encoding)
lines.each{ line -> writer.writeLine(line) }
writer.close()
}
initConfigString("name","configName")
initConfigInt("isOpen",4)
- 修改我们的serviceLoader
ext {
baseDevConfigs = [
'InstallManager'
,
'InstallManager'
,
'InstallManager'
,
'InstallManager'
,
'InstallManager'
]
}
/**
* 生成新的ServiceLoader.java文件内容
* @return
*/
def initServiceLoaderFile() {
def configFile = "${projectDir}/src/main/java/config/ServiceLoader.java"
File file = new File(configFile);
def oldlines = file.readLines();
def newlines = []
int startIndex = -1;
int i = 0;
def expConfig = "configs.put(";
def packageIndex = 0;
for (line in oldlines) {
if (!line.contains("configs.put")) {
newlines.add(line)
}
if (line.contains("package")) {
packageIndex = i;
}
if (line.contains("public ServiceLoader(){")){
startIndex = i;
}
i++;
}
if (!newlines.contains("import com.wangpos.test.inter.*;")) {
newlines.add(packageIndex + 1, "import com.wangpos.test.inter.*;");
}
//拼接默认配置内容
for (line in baseDevConfigs) {
def instance = " configs.put(${line}.class,${line}Impl.class);";
newlines.add(startIndex+1, instance);
}
PrintWriter writer = file.newPrintWriter("UTF-8")
newlines.each { line -> writer.println(line) }
writer.close()
}
initServiceLoaderFile()
### 文件操作基础
- 文件对象
在工程目录下,我们可以通过File的构造方法来快速定位一个文件并创建相应的File对象:
// 传入文件的相对路径
File configFile = new File('src/config.xml')
// 传入文件的绝对路径
configFile = new File(configFile.absolutePath)
// 通过相对路径构建一个 java.nio.file.Path 对象并传入
configFile = new File(Paths.get('src', 'config.xml'))
// 读取property变量构建 java.nio.file.Path 对象并传入
configFile = new File(Paths.get(System.getProperty('user.home')).resolve('global-config.xml'))
- 文件集合FileCollection
这个接口描述了针对一组文件的操作和属性。在Gradle中,许多类都继承了这一接口,例如依赖配置对象dependency configurations .
与创建File对象不同,创建FileCollection对象的唯一方式是通过 Project.files(java.lang.Object[])
方法,该方法的入参数目是任意多个,类型也可以是表示相对路径的字符串,File对象,甚至是集合,数组等。
FileCollection collection = files('src/file1.txt',
new File('src/file2.txt'),
['src/file3.txt', 'src/file4.txt'],
Paths.get('src', 'file5.txt'))
// 遍历文件集合
collection.each { File file ->
println file.name
}
// 将FileCollection对象转换为其他类型
Set set = collection.files
Set set2 = collection as Set
List list = collection as List
String path = collection.asPath
File file = collection.singleFile
File file2 = collection as File
// 对FileCollection进行加减操作
def union = collection + files('src/file3.txt')
def different = collection - files('src/file3.txt')
|
24d35368500c11ebd16028ae40bd419426842d97
|
[
"Markdown",
"Java",
"Gradle"
] | 44 |
Markdown
|
stefanJi/GradlePluginDevelop
|
b6619f74ca78daee195bb53acbf760bddb9ef9a9
|
0940474a69620c26f109ce5db96fd9cc417e28cc
|
refs/heads/main
|
<file_sep>#include <stdio.h> // Fichier contenant les en-têtes des fonctions standard d'entrées/sorties
#include <stdlib.h> // Fichier contenant les en-têtes de fonctions standard telles que malloc()
#include <string.h> // Fichier contenant les en-têtes de fonctions standard de gestion de chaînes de caractères
#include <unistd.h> // Fichier d'en-têtes de fonctions de la norme POSIX (dont gestion des fichiers : write(), close(), ...)
#include <sys/types.h> // Fichier d'en-têtes contenant la définition de plusieurs types et de structures primitifs (système)
#include <sys/socket.h> // Fichier d'en-têtes des fonctions de gestion de sockets
#include <netinet/in.h> // Fichier contenant différentes macros et constantes facilitant l'utilisation du protocole IP
#include <netdb.h> // Fichier d'en-têtes contenant la définition de fonctions et de structures permettant d'obtenir des informations sur le réseau (gethostbyname(), struct hostent, ...)
#include <memory.h> // Contient l'inclusion de string.h (s'il n'est pas déjà inclus) et de features.h
//include <errno.h> // Fichier d'en-têtes pour la gestion des erreurs (notamment perror())
#include <arpa/inet.h>
#include <stdbool.h>
#include <time.h>
#include <signal.h>
#include <sys/wait.h>
// constantes :
#define VERSION "1.04"
char time_request [128];
struct tm *pTime;
// Fonction qui renvoie un booléen selon si le string commence par le prefix.
//(utilisée ici pour verifier les entrées utilisateur)
// Return 1 when `string` begins with `prefix`, 0 otherwise.
// (Used here to validate user-supplied command-line options.)
_Bool starts_with(const char *restrict string, const char *restrict prefix) {
    return strncmp(string, prefix, strlen(prefix)) == 0;
}
// Create a TCP server socket bound to `port` on all local interfaces,
// with send and receive timeouts of `alarmtime` seconds.
// Returns the socket descriptor on success, or -1 on failure
// (socket creation or bind error; a message is printed via perror).
int creersock(u_short port, int alarmtime) {
    int sock, retour;
    // Local address the socket will be bound to (struct from netinet/in.h).
    struct sockaddr_in adresse;
    /*
      Create the socket itself. socket() takes three parameters:
      - the address family: AF_INET for TCP/IP (4-byte IP addresses);
        AF_UNIX would use filesystem paths instead of port numbers.
      - the transport-layer type: SOCK_STREAM for TCP, SOCK_DGRAM for UDP,
        SOCK_RAW for frames handled directly by the lower layers.
      - the protocol number: 0 selects the default protocol for the type.
    */
    sock = socket(AF_INET, SOCK_STREAM,0);
    // Not a valid descriptor: creation failed, report and return -1.
    if (sock<0) {
        perror ("ERREUR OUVERTURE");
        return(-1);
    }
    // Fill in the sockaddr_in fields:
    // address family, as above.
    adresse.sin_family = AF_INET;
    /* Port the socket will listen on, converted with htons()
       ("host to network short") to network byte order.
    */
    adresse.sin_port = htons(port);
    /* Local address clients may connect to. For a server socket this is
       INADDR_ANY (0.0.0.0), i.e. listen on every available local interface.
    */
    adresse.sin_addr.s_addr = INADDR_ANY;
    int optval = 1;
    // Allow quick restarts: reuse the address even if it is in TIME_WAIT.
    if (setsockopt(sock, SOL_SOCKET, SO_REUSEADDR, &optval, sizeof optval) < 0){
        perror("setsockopt(SO_REUSEADDR) failed");
    }
    struct timeval tv;
    tv.tv_sec = alarmtime;
    tv.tv_usec = 0;
    // Bound both reads and writes so a slow peer cannot block us forever.
    if (setsockopt(sock, SOL_SOCKET, SO_RCVTIMEO, (const char*)&tv, sizeof tv)){
        perror("setsockopt(SO_RCVTIMEO) failed");
    }
    if (setsockopt(sock, SOL_SOCKET, SO_SNDTIMEO, (const char*)&tv, sizeof tv)){
        perror("setsockopt(SO_SNDTIMEO) failed");
    }
    /*
      bind() attaches the socket created above to the identity described in
      the sockaddr_in structure (address + port). The last argument is the
      length of the address. This makes the socket reachable from outside.
    */
    retour = bind (sock,(struct sockaddr *)&adresse,sizeof(adresse));
    // Bind failure (port in use, not root for ports < 1024, ...): report it.
    if (retour<0) {
        perror ("IMPOSSIBLE DE NOMMER LA SOCKET");
        return(-1);
    }
    // Success: hand back the bound socket descriptor.
    return (sock);
}
// Echo `message` to stdout, and append it to `file` as well when
// `mustLog` is non-zero.
void print_or_log(char* message, int mustLog, FILE* file){
    if (mustLog) {
        fputs(message, file);
    }
    fputs(message, stdout);
}
// Return a pointer to the value part of an option string, i.e. `str`
// advanced past the `to_cut` prefix (e.g. "--port=80" / "--port=" -> "80").
char* get_param(char* str, char* to_cut){
    size_t prefix_len = strlen(to_cut);
    return &str[prefix_len];
}
// Format one request as an Apache combined-log-style line into
// `string_to_log`: "<client-ip> - - [date] "<request>" 404 100".
// `string_to_log` must be large enough for the IP, timestamp and request.
void apache_display_log(char* their_ip, char* requete, char* string_to_log) {
    // Use locals instead of the file-level time_request/pTime globals so
    // concurrent callers cannot clobber each other's timestamp buffer.
    char ts_buf[128];
    time_t timestamp;
    time(&timestamp);  // was garbled as "time(×tamp)" (mangled &timestamp)
    struct tm *tm_now = localtime(&timestamp);
    strftime(ts_buf, sizeof ts_buf, "[%d/%b/%Y:%H:%M:%S -0000]", tm_now);
    sprintf(string_to_log, "%s - - %s \"%s\" 404 100\n", their_ip, ts_buf, requete);
}
// Format one request in websnarf's native log style into `string_to_log`:
// "<mm/dd HH:MM:SS> <our-ip> \t-> <their-ip> \t: <request>".
void display_log(char* our_ip, char* their_ip, char* requete, char* string_to_log){
    // Use locals instead of the file-level time_request/pTime globals so
    // concurrent callers cannot clobber each other's timestamp buffer.
    char ts_buf[128];
    time_t timestamp;
    time(&timestamp);  // was garbled as "time(×tamp)" (mangled &timestamp)
    struct tm *tm_now = localtime(&timestamp);
    strftime(ts_buf, sizeof ts_buf, "%m/%d %H:%M:%S", tm_now);
    sprintf(string_to_log, "%s %s \t-> %s \t: %s\n", ts_buf, our_ip, their_ip, requete);
}
// Format one request as an IIS-style log line into `string_to_log`.
// The request line "METHOD URI VERSION" is split into its three tokens;
// missing tokens are logged as "-".
void iis_display_log(char* our_ip, char* their_ip, char* requete, char* string_to_log){
    char time_request [128];
    time_t timestamp;
    time(&timestamp);  // was garbled as "time(×tamp)" (mangled &timestamp)
    struct tm *pTime = localtime(&timestamp);
    strftime(time_request, sizeof time_request, "%D %H:%M:%S", pTime);

    char* server_name = "server_name";
    // Original code called strlen() on these while uninitialized (undefined
    // behavior) and used non-terminating, overlapping strncpy calls.
    // Initialize to "-" and let sscanf do bounded, NUL-terminated parsing.
    char method[64] = "-";
    char target_resource[512] = "-";
    char protocol_version[64] = "-";
    // Whitespace-delimited scan of the first three tokens; field widths
    // leave room for the terminating NUL in each buffer.
    sscanf(requete, "%63s %511s %63s", method, target_resource, protocol_version);

    char* user_agent = "Mozilla";
    sprintf(string_to_log, "%s %s - %s %s %s %s - 404 100 100 0 %s %s - -\n",
            time_request, their_ip, server_name, our_ip, method,
            target_resource, protocol_version, user_agent);
}
// Copy the first line of `request` (everything before the first '\n',
// or the whole string if none) into `res`, NUL-terminated.
// `res` must have room for that line plus the terminator.
void get_request(char request[], char* res){
    // strcspn gives the length of the prefix that contains no '\n',
    // and never counts past the string's own NUL.
    size_t len = strcspn(request, "\n");
    memcpy(res, request, len);
    // The original used strncpy(res, request, len), which never wrote a
    // terminator, leaving `res` unterminated for every caller.
    res[len] = '\0';
}
// Parse a comma-separated list of port numbers ("80,8080,443") into a
// heap-allocated int array of `nbPorts` entries. Modifies `argvi` in
// place (strtok). Caller owns (and must free) the returned array;
// returns NULL if allocation fails.
int* getPorts(char* argvi, int nbPorts){
    const char coma[2] = ",";
    // Original allocated malloc(nbPorts) BYTES (the "sizeof(int) *" was
    // commented out), overflowing the buffer past the first few ports.
    int* res = malloc(sizeof(int) * nbPorts);
    if (res == NULL) {
        return NULL;
    }
    int i = 0;
    char* token = strtok(argvi, coma);
    // Bound the loop by nbPorts so extra tokens can never write past the array.
    while (token != NULL && i < nbPorts) {
        res[i] = atoi(token);
        token = strtok(NULL, coma);
        i++;
    }
    return res;
}
<file_sep># Projet_Reseaux_2020
Projet d'analyse du script websnarf
<file_sep>all: websnarf
websnarf: websnarf.o
gcc -o websnarf -lnsl -Wall websnarf.o
websnarf.o: websnarf.c
gcc -c -Wall websnarf.c
<file_sep>/* COMMAND LINE
// ------------
//
// --port=## Listen on port ## instead of 80. This is mainly only
// good for developers making sure we've not broken something
// while testing on a "live" system.
//
// --timeout=## Wait for at most ## seconds for input from the remote end.
// Longer timeouts will capture a bit more traffic over slow
// links, but it will hold up the rest of the program.
//
// --log=FILE Append to the given file. The info saved there is also
// *always* written to the standard output, but this insures
// that we have a record even if the program is restarted.
//
// --max=## max length of captured request is ## characters. Most URL
// fetches are small ("GET / HTTP/1.0") but the Code Red
// ones are quite large. We don't care to record much more
// than one line's worth.
//
// --apache put logs in Apache format (should be the default)
//
// --version show current version number
*/
#include "websnarf.h"
int main (int argc, char *argv[]) {
int debug = 0;
char* logfile = "";
int logbyport = 0;
int mustLog = 0;
int port = 80; //TCP seulement
int alarmtime = 5; //secondes
int maxline = 666; //longueur max d'une ligne
int apache = 0; // option --apache
int iis = 0; // option --iis
int isDaemon = 0;
int isMultiPort = 0;
int nbPorts = 1;
int* ports_tab;
if(argc > 1){ // cas ou il y'a des paramètres saisis.
for(int i=1; i<argc; i++){
if( starts_with(argv[i],"--help") ){ // --help
printf("usage: %s [options]\n\n\t--timeout=<n>\twait at most <n> seconds on a read (default $alarmtime)\n\t--log=FILE\tappend output to FILE\n\t--port=<n>\tlisten on TCP port <n> (default $port/tcp)\n\t--multiport=<n>,...,<n>\tlisten on all the TCP port listed between ','. For each port, a new ./websnarf is executed with --port. ! If used with --daemon remember to kill all children when you want to stop the program !\n\t--max=<n>\tsave at most <n> chars of request (default $maxline chars)\n\t--debug\t\tturn on a bit of debugging (mainly for developers)\n\t--apache\tlogs are in Apache style\n\t--iis\t\tlogs are in IIS style\n\t--daemon\trun the program as daemon(in the same directory) ! If used with --multiport you will have to kill all children one by one !\n\t--version\tshow version info\n\n\t--help\tshow this listing\n",__FILE__);
fflush(stdout);
exit(0);
}
else if( starts_with(argv[i],"--log=") ){ // --log=FILE
logfile = get_param(argv[i],"--log=");
mustLog = 1;
}
else if( starts_with(argv[i],"--logbyport") ){ // --log=FILE_port
logbyport = 1;
}
else if( starts_with(argv[i],"--port=") ){ // --port=##
port = atoi(get_param(argv[i],"--port="));
}
else if( starts_with(argv[i],"--multiport=") ){ // --multiport=##,...,##
isMultiPort = 1;
char* str_ports = get_param(argv[i],"--multiport=");
for(int j = 1; j < strlen(str_ports); j++){
if(str_ports[j] == 44) nbPorts++; // 44 : code ASCII de la virgule (séparateur des numéros de ports)
}
ports_tab = getPorts(str_ports, nbPorts);
}
else if( starts_with(argv[i],"--timeout=") ){ // --timeout=##
alarmtime = atoi(get_param(argv[i],"--timeout="));
}
else if( starts_with(argv[i],"--max=") ){ // --max=##
maxline = atoi(get_param(argv[i],"--max="));
}
else if( starts_with(argv[i],"--debug") ){ // --debug
debug = 1;
}
else if( starts_with(argv[i],"--apache") ){ // --apache
apache = 1;
}
else if( starts_with(argv[i],"--iis") ){ // --iis
iis = 1;
}
else if( starts_with(argv[i],"--daemon") ){ // --daemon
isDaemon = 1;
}
else if( starts_with(argv[i],"--version") ){ // --version
printf("websnarf v%s -- http://www.unixwiz.net/tools/websnarf.html\n",VERSION);
fflush(stdout);
exit(0);
}
else{
if(! starts_with(argv[0],"websnarf") ){
printf("Erreur : tapez --help pour afficher l'aide.\n");
fflush(stdout);
exit(-1);
}
}
}
}
if(iis == 1 && apache == 1){
perror("Format de log IIS et apache incompatibles");
fflush(stdout);
exit(-1);
}
if(logbyport){
if(!mustLog){
printf("L'option --logbyport est inutile si l'option --log n'est pas utilisée\n");
fflush(stdout);
}
}
int status = 0;
if(isMultiPort){
int ret[nbPorts];
char process_name[20];
char parametres[12][1024];
char str_port_param[20];
int j;
// Pour chaque port, on créer un fils a port unique
for(int i = 0; i < nbPorts; i++){
// Copie des parametres
sprintf(process_name,"websnarf_%d",ports_tab[i]);
// Ajout du port a la liste des paramètres
for(j = 1; j < argc; j++){
if(! starts_with(argv[j],"--multiport=")){
strcpy(parametres[j], argv[j]);
} else {
sprintf(str_port_param, "--port=%d\n",ports_tab[i]);
strcpy(parametres[0], str_port_param);
}
}
// Création du fils
switch (ret[i] = fork()) {
case(pid_t) -1 :
perror("création impossible");
exit(-1);
break;
case(pid_t) 0 : // On est dans le fils
//printf("Création d'un nouveau fils (le %d-ème) au nom : %s,\tde pid : %d\n", i, process_name, getpid());
execl("./websnarf", process_name, parametres[0], parametres[1], parametres[2], parametres[3], parametres[4], parametres[5], parametres[6], parametres[7], parametres[8], parametres[9], parametres[10], parametres[11], NULL);
perror("execl fail");
exit(-1);
break;
default : // On est dans le pere
break;
}
}
for(int i = 0; i < nbPorts; i++){
//printf("process %d\twaiting for %d\n", getpid(), ret[i]);
waitpid(ret[i], &status, 0);
}
//printf("process %d is ded\n", getpid());
exit(1);
}
if(isDaemon){
daemon(1, 0);
}
// -----------------------------------------------------------------------
// Create the socket to listen on. It's a fatal error if we cannot listen
// on port 80, the most common reasons being (a) we're not root or
// (b) something else is already listening on that. Is Apache running?
//
int newsockfd, sock, s;
sock = creersock (port, alarmtime);
if (sock<0) {
perror ("ERREUR OUVERTURE");
fflush(stdout);
exit(-1);
}
// -----------------------------------------------------------------------
// CREATE LOGFILE
//
// ... but only if requested with --log=FILE
//
FILE* file;
char str_affiche[BUFSIZ];
char string_to_log[BUFSIZ];
char append_port_to_log[16];
if(strlen(logfile) > 0){
if(logbyport){
sprintf(append_port_to_log, "_%d", port);
strcat(logfile, append_port_to_log);
}
file = fopen(logfile, "rb+");
if(file == NULL){
file = fopen(logfile, "wb");
}
file = fopen(logfile, "a+");
sprintf(str_affiche, "# Now listening on port %d, and logging in %s\n",port, logfile);
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
}
sprintf(str_affiche, "%s v%s listening on port %d (timeout=%d secs)\n", argv[0], VERSION, port, alarmtime);
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
char our_ip [512];
char their_ip [512];
char request [2048];
char printed_requete[512];
struct sockaddr_in client_addr;
socklen_t clen = sizeof(client_addr);
struct sockaddr_in server_addr;
socklen_t slen = sizeof(server_addr);
struct hostent *client_name;
int retour_fils;
while(1){
listen (sock,5);
// Accepte une connexion TCP. Si après (alarmtime) secondes aucunes connexion n'est établie, retourne -1.
newsockfd = accept(sock, (struct sockaddr *)&client_addr, &clen);
if(newsockfd != -1){
getsockname(newsockfd, (struct sockaddr *)&server_addr, &slen);
getpeername(newsockfd, (struct sockaddr *)&client_addr, &clen);
// Calcul du string de notre IP
sprintf(our_ip, "%s", inet_ntoa(server_addr.sin_addr));
// Calcul du string de l'IP distante
sprintf(their_ip, "%s", inet_ntoa(client_addr.sin_addr));
//Pas de résolution de nom en local, c'est inutile
if(!(starts_with(their_ip, "192.168") || starts_with(their_ip, "172"))){
client_name = gethostbyaddr(&client_addr, sizeof(client_addr), AF_INET);
}
if(client_name != NULL) {
printf("Client name : %s\n", client_name->h_name);
}
if(debug){
sprintf(str_affiche, "--> accepted connection from %s\n",their_ip);
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
}
switch ( retour_fils = fork() ) {
case(pid_t) -1 :
perror("fork du processus d'écoute d'un client échoué");
exit(-1);
case(pid_t) 0 :
close ( sock );
time_t start, end;
double elapsed;
time(&start);
// récupération de la requête effectuée par le "client"
do {
time(&end);
elapsed = difftime(end, start);
s = read(newsockfd, request, maxline);
if(s >= 1){
if(debug){
sprintf(str_affiche, " client ready to read, now reading\n");
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
}
request[s] = 0;
if(debug){
sprintf(str_affiche, " got read #%d of [%ld]\n", 1,strlen(request) );
print_or_log(str_affiche, mustLog, file);
sprintf(str_affiche, "[%s]\n",request);
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
}
// On ne prend que la première ligne de la requete
if(!iis){
get_request(request, printed_requete);
}
// Récupération de l'heure de la requête
if(apache) {
apache_display_log(their_ip, printed_requete, string_to_log);
} else if(iis) {
iis_display_log(our_ip, their_ip, printed_requete, string_to_log);
} else {
display_log(our_ip, their_ip, printed_requete, string_to_log);
}
sprintf(str_affiche,"%s", string_to_log);
print_or_log(str_affiche, mustLog, file);
fflush(stdout);
}
} while(s > 1 || elapsed < alarmtime);
// On a fini de lire la socket
close (newsockfd);
exit(1);
default:
break;
}
close(newsockfd);
}
}
close(sock);
return 0;
}
|
db983868fad3da9e8fd901b5a239479338520608
|
[
"Markdown",
"C",
"Makefile"
] | 4 |
C
|
TabaryM/Projet_Reseaux_2020
|
75aa726ff486dec26d47f40300ea6ef5b799121b
|
0bb12a1200ba2d877877914d35a4ed6f954daa56
|
refs/heads/master
|
<file_sep>app.config(function($stateProvider, $urlRouterProvider){
$stateProvider
.state('home', {
url: '/home',
templateUrl: 'home.html'
}).state('station', {
url: '/station/:id',
templateUrl: 'station.html'
}).state('accounts', {
url: '/accounts',
templateUrl: 'accounts.html'
}).state('signup', {
url: '/accounts/:id',
templateUrl: 'signup.html'
}).state('login', {
url: '/login',
templateUrl: 'login.html'
}).state('user', {
url: '/user',
templateUrl: 'user.html'
});
$urlRouterProvider.otherwise('/home');
});<file_sep>app.factory('$stations', ['$http', function($http){
// Shared station registry: holds the (currently hard-coded) list of radio
// stations, caches one Audio element per station, and tracks which station
// is "now playing" app-wide.
// NOTE(review): $http is injected but unused — TODO confirm before removing.
var stations = {
// Index (as a string) into stations.hosts of the station currently playing.
nowPlayingId: '0'
};
// Static station fixtures; each entry's audioPlayer is lazily filled with an
// Audio element by saveAudio(), and isPlaying mirrors playback state.
stations.hosts = [
{
_id: 'fgi4bihjwkj34knwlkn34',
isPlaying: '',
avatar: 'http://animals.sandiegozoo.org/sites/default/files/2016-08/animals_hero_reindeer.jpg',
stationName: 'El Leon Sonidero',
subStationName: 'El mejor ruido del siglo!',
coverImage: 'http://animals.sandiegozoo.org/sites/default/files/2016-08/animals_hero_reindeer.jpg',
stationDescription: 'Las mejores cumbias sonideras, haz click para escuchar la estacion en vivo! No se olviden dejar sus commentarios!',
stationUrl: 'http://hyades.shoutca.st:8043/stream',
facebookUrl: 'http://www.facebook.com',
chatUrl: 'http://msgstar.herokuapp.com',
audioPlayer: '',
comments: [{
from: 'Luis',
message: 'muy buena estacion!'
},{
from: 'Jessie',
message: 'muy buena estacion!'
},{
from: 'Tori',
message: 'muy buena estacion!'
}]
},{
_id: 'bihjwkj34knwffsrwr3',
isPlaying: '',
avatar: 'img/tigreazul.jpg',
stationName: 'Tigre Sonidero',
subStationName: 'Estacion En Vivo',
coverImage: 'img/tigre2.jpg',
stationDescription: 'Las mejores cumbias sonideras, haz click para escuchar la estacion en vivo! No se olviden dejar sus commentarios!',
stationUrl: 'http://incompetech.com/music/royalty-free/mp3-royaltyfree/Miami%20Nights%20-%20Extended%20Theme.mp3',
facebookUrl: 'http://www.facebook.com',
chatUrl: 'http://msgstar.herokuapp.com',
audioPlayer: '',
comments: [{
from: 'Luis',
message: 'muy buena estacion!'
},{
from: 'Jessie',
message: 'muy buena estacion!'
},{
from: 'Tori',
message: 'muy buena estacion!'
}]
},{
_id: 'jk34bk34bk34kjk43434343',
isPlaying: '',
avatar: 'http://www.driven.co.nz/media/137800/forgiato-sema-ferrari-tec-2-4-1.jpg',
stationName: '<NAME>',
subStationName: 'Los Mejores del Milenio!',
coverImage: 'http://www.driven.co.nz/media/137800/forgiato-sema-ferrari-tec-2-4-1.jpg',
stationDescription: 'Las mejores cumbias sonideras, haz click para escuchar la estacion en vivo! No se olviden dejar sus commentarios!',
stationUrl: 'http://hyades.shoutca.st:8043/stream',
facebookUrl: 'http://www.facebook.com',
chatUrl: 'http://msgstar.herokuapp.com',
audioPlayer: '',
comments: [{
from: 'Luis',
message: 'muy buena estacion!'
},{
from: 'Jessie',
message: 'muy buena estacion!'
},{
from: 'Tori',
message: 'muy buena estacion!'
}]
}
];
// Set and return the playing flag for station `id`.
stations.audioStatus = function(id, status){
stations.hosts[id].isPlaying = status;
return status;
};
// Read the playing flag for station `id`.
stations.getAudioStatus = function(id){
return stations.hosts[id].isPlaying;
};
// Return the full station record for `id` (index into hosts).
stations.getStation = function(id){
return stations.hosts[id];
};
// Cache an Audio element on the station record and return it.
stations.saveAudio = function(id, audioPlayer){
stations.hosts[id].audioPlayer = audioPlayer;
// Only fetch stream metadata up-front, not the stream itself.
stations.hosts[id].audioPlayer.preload = 'metadata';
stations.hosts[id].audioPlayer.id = id;
return stations.hosts[id].audioPlayer;
};
// Return the cached Audio element for `id`, or false if the id is unknown.
// Note: a known station with no cached player yields '' (the fixture default).
stations.loadAudio = function(id){
return stations.hosts[id] ? stations.hosts[id].audioPlayer : false;
};
// Mark `id` as the app-wide "now playing" station.
stations.setNowPlaying = function(id){
console.log('now playing id:', id);
stations.nowPlayingId = id;
};
return stations;
}]);
app.factory('$page', function(){
  // Shared page state: the nav-bar title and the global playback flag.
  var state = { title: 'Radio Chat', isPlaying: '' };

  // Update the current page title.
  state.setTitle = function(newTitle){
    state.title = newTitle;
  };

  // Record whether audio is currently playing.
  state.setAudioStatus = function(newStatus){
    state.isPlaying = newStatus;
  };

  // Read the current page title.
  state.getTitle = function(){
    return state.title;
  };

  // Read the current playback flag.
  state.getAudioStatus = function(){
    return state.isPlaying;
  };

  return state;
});
app.directive('sidebarTemplate', function(){
// Side-menu directive: mirrors Firebase auth state into the scope and
// exposes a sign-out action.
// NOTE(review): $window is injected but unused — TODO confirm before removing.
return {
restrict: 'E',
scope: {},
controller: ['$scope', '$firebase', '$window', '$ionicHistory', function($scope, $firebase, $window, $ionicHistory){
// Fires on every login/logout; the callback runs outside Angular's
// digest cycle, hence the explicit $scope.$apply() below.
$firebase.app.auth().onAuthStateChanged(function(user) {
$scope.user = $firebase.currentUser = $firebase.app.auth().currentUser;
$scope.$apply();
console.log($firebase.app.auth().currentUser);
if ($scope.user) {
// After login, block the back-swipe into pre-login views.
$ionicHistory.nextViewOptions({
disableAnimate: false,
disableBack: true
});
$scope.isLoggedIn = true;
} else {
$scope.isLoggedIn = false;
}
});
// Sign out; the onAuthStateChanged handler above then updates the UI.
$scope.signout = function(){
$firebase.app.auth().signOut();
};
}],
templateUrl: '../../templates/sidebar.html'
};
});
app.directive('navTitle', function(){
  // Nav-bar title widget: exposes the shared $page title getter and keeps a
  // local copy of the global playback flag in sync.
  // Fix: the original also injected $stations, which was never used; the
  // unused dependency has been removed.
  return {
    restrict: 'E',
    scope: {},
    controller: ['$scope', '$page', function($scope, $page){
      // The getter function itself is exposed; the template invokes it.
      $scope.page = $page.getTitle;
      // Mirror the global playing flag whenever it changes.
      $scope.$watch(function(){ return $page.isPlaying; }, function(newStatus){
        $scope.isPlaying = newStatus;
      });
    }],
    templateUrl: '../../templates/navTitle.html'
  };
});
app.directive('navAudioPlayer', function(){
// Mini player in the nav bar: follows whichever station the $stations
// service marks as "now playing" and offers a stop action for it.
return {
restrict: 'E',
scope: {},
controller: ['$scope', '$stations', '$page', function($scope, $stations, $page){
// Watch both the now-playing station id and that station's playing flag;
// $watchCollection fires when either member of the returned object changes.
$scope.$watchCollection(function(){ return { stationId: $stations.nowPlayingId, isPlaying: $stations.getStation($stations.nowPlayingId).isPlaying }; }, function(newStation, oldStation){
var audio = $stations.loadAudio($stations.nowPlayingId);
var station = $stations.getStation(newStation.stationId);
// Mirror playback state locally and into the shared $page service.
$scope.isPlaying = newStation.isPlaying;
$page.setAudioStatus(newStation.isPlaying);
// stop is (re)bound on every watch tick so it closes over the latest
// audio element and the old/new station ids of this transition.
$scope.stop = function(){
audio.pause();
// load() resets the media element; a fresh Audio is then cached so the
// station can be replayed from a clean state.
audio.load();
audio = $stations.saveAudio(newStation.stationId, new Audio(station.stationUrl));
$scope.isPlaying = $stations.audioStatus(newStation.stationId, false);
// NOTE(review): this reverts nowPlayingId to the previous station —
// confirm that is intended rather than clearing it.
$stations.nowPlayingId = oldStation.stationId;
$page.setTitle('Radio Chat');
};
});
}],
templateUrl: '../../templates/navAudioPlayer.html'
};
});
app.directive('homeTemplate', function(){
  // Landing page: hands the full list of station hosts to the template.
  return {
    templateUrl: '../../templates/home.html',
    restrict: 'E',
    scope: {},
    controller: ['$scope', '$stations', function($scope, $stations){
      $scope.stations = $stations.hosts;
    }]
  };
});
app.directive('audioPlayer', function(){
  // Per-station play/stop widget. A single Audio element per station is
  // cached in the $stations service so playback survives route changes.
  // Fix: $scope.toggleLeft referenced $ionicSideMenuDelegate without it ever
  // being injected, which threw a ReferenceError when invoked; it is now a
  // proper DI dependency.
  return {
    restrict: 'E',
    scope: {stationId: '='},
    controller: ['$scope', '$stations', '$stateParams', '$page', '$ionicSideMenuDelegate', function($scope, $stations, $stateParams, $page, $ionicSideMenuDelegate){
      // The route param wins; the attribute binding is the fallback.
      var stationId = $stateParams.id || $scope.stationId;
      var station = $stations.getStation(stationId);
      // Reuse the cached Audio element if present, otherwise create+cache one.
      var audio = $stations.loadAudio(stationId);
      if(!audio) audio = $stations.saveAudio(stationId, new Audio(station.stationUrl));
      this.station = station;
      this.audio = audio;
      this.$page = $page;
      // Mirror the service-side playing flag into the local scope.
      $scope.$watch(function(){ return station.isPlaying; }, function(newVal, oldVal){
        $scope.isPlaying = $stations.getAudioStatus(stationId);
      });
      // When another station takes over playback, pause this one.
      $scope.$watch(function(){ return $stations.nowPlayingId; }, function(newVal, oldVal){
        if(oldVal === stationId && oldVal !== newVal){
          audio.pause();
          $scope.isPlaying = $stations.audioStatus(stationId, false);
        }
      });
      $scope.isPlaying = $stations.getAudioStatus(stationId);
      // Start playback and publish this station as "now playing".
      $scope.play = function(){
        audio.play();
        $scope.isPlaying = $stations.audioStatus(stationId, true);
        $stations.setNowPlaying(stationId);
        $page.setTitle(station.stationName);
        $page.setAudioStatus($scope.isPlaying);
      };
      // Pause playback and restore the default nav-bar title.
      $scope.stop = function(){
        audio.pause();
        $scope.isPlaying = $stations.audioStatus(stationId, false);
        $stations.setNowPlaying(stationId);
        $page.setTitle('Radio Chat');
        $page.setAudioStatus($scope.isPlaying);
      };
      // Open/close the left side menu (Ionic delegate).
      $scope.toggleLeft = function() {
        $ionicSideMenuDelegate.toggleLeft();
      };
    }],
    templateUrl: '../../templates/audio-player.html'
  };
});
app.directive('stationTemplate', function(){
  // Station detail page: resolves the station referenced by the route id.
  return {
    templateUrl: '../../templates/station.html',
    restrict: 'E',
    scope: {},
    controller: ['$scope', '$stations', '$stateParams', function($scope, $stations, $stateParams){
      $scope.station = $stations.getStation($stateParams.id);
    }]
  };
});
|
8ff02c0c1c795beecd2c64f44f553a538ae4de18
|
[
"JavaScript"
] | 2 |
JavaScript
|
elite-groove/RadioChat
|
8cc7a6ef5e1cd3acce21d81ecc9f5e5f3c7256f5
|
3be4e4a99ae4de1d828b72855c63822d496bb907
|
refs/heads/master
|
<repo_name>miguelsolorio/midnight-theme<file_sep>/scripts/clean.js
// Build helper: wipe and re-create the themes output directory.
const fs = require("fs");
const rimraf = require("rimraf");
const outputDirectory = "themes";
// clear dist folder (rimraf removes the directory recursively, like `rm -rf`)
rimraf(outputDirectory, function () {
console.log(`deleted "${outputDirectory}" folder`);
// re-create dist folder so subsequent build steps can write into it
fs.mkdirSync(outputDirectory);
});<file_sep>/README.md
<div align="center">

# 🌚 Midnight Theme
A dark theme for Visual Studio Code

</div>
|
9c7c6b3159a0d344d34b20f465355c8d7f41cf28
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
miguelsolorio/midnight-theme
|
5026848798740d802a5c22e498ae4f6320533c2c
|
ebaf867d4e61f21518acfe22cc7e37127fa9661c
|
refs/heads/master
|
<repo_name>ShawnMeng/data-placement<file_sep>/data-intensive queries.sql
-- Query 98
explain analyze select i_item_desc
,i_category
,i_class
,i_current_price
,sum(ss_ext_sales_price) as itemrevenue
,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over
(partition by i_class) as revenueratio
from
store_sales
,item
,date_dim
where
ss_item_sk = i_item_sk
and i_category in ('Jewelry', 'Sports', 'Books')
and ss_sold_date_sk = d_date_sk
and d_date between cast('2001-01-12' as date)
and (cast('2001-01-12' as date) + 30)
group by
i_item_id
,i_item_desc
,i_category
,i_class
,i_current_price
order by
i_category
,i_class
,i_item_id
,i_item_desc
,revenueratio;
explain analyze select i_item_desc
,i_category
,i_class
,i_current_price
,sum(ss_ext_sales_price) as itemrevenue
,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over
(partition by i_class) as revenueratio
from
store_sales2
,item
,date_dim
where
ss_item_sk = i_item_sk
and i_category in ('Jewelry', 'Sports', 'Books')
and ss_sold_date_sk = d_date_sk
and d_date between cast('2001-01-12' as date)
and (cast('2001-01-12' as date) + 30)
group by
i_item_id
,i_item_desc
,i_category
,i_class
,i_current_price
order by
i_category
,i_class
,i_item_id
,i_item_desc
,revenueratio;
-- Query 40
explain analyze select
w_state
,i_item_id
,sum(case when (cast(d_date as date) < cast ('1998-04-08' as date))
then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before
,sum(case when (cast(d_date as date) >= cast ('1998-04-08' as date))
then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after
from
catalog_sales left outer join catalog_returns on
(cs_order_number = cr_order_number
and cs_item_sk = cr_item_sk)
,warehouse
,item
,date_dim
where
i_current_price between 0.99 and 1.49
and i_item_sk = cs_item_sk
and cs_warehouse_sk = w_warehouse_sk
and cs_sold_date_sk = d_date_sk
and d_date between (cast ('1998-04-08' as date) - 30)
and (cast ('1998-04-08' as date) + 30)
group by
w_state,i_item_id
order by w_state,i_item_id
limit 100;
-- Query 42
explain analyze select dt.d_year
,item.i_category_id
,item.i_category
,sum(ss_ext_sales_price)
from date_dim dt
,store_sales
,item
where dt.d_date_sk = store_sales.ss_sold_date_sk
and store_sales.ss_item_sk = item.i_item_sk
and item.i_manager_id = 1
and dt.d_moy=12
and dt.d_year=1998
group by dt.d_year
,item.i_category_id
,item.i_category
order by
sum(ss_ext_sales_price) desc,dt.d_year
,item.i_category_id
,item.i_category
limit 100;
explain analyze select dt.d_year
,item.i_category_id
,item.i_category
,sum(ss_ext_sales_price)
from date_dim dt
,store_sales1
,item
where dt.d_date_sk = store_sales1.ss_sold_date_sk
and store_sales1.ss_item_sk = item.i_item_sk
and item.i_manager_id = 1
and dt.d_moy=12
and dt.d_year=1998
group by dt.d_year
,item.i_category_id
,item.i_category
order by
sum(ss_ext_sales_price) desc,dt.d_year
,item.i_category_id
,item.i_category
limit 100;
-- Query 22
explain analyze select i_product_name
,i_brand
,i_class
,i_category
,avg(inv_quantity_on_hand) qoh
from inventory
,date_dim
,item
,warehouse
where inv_date_sk=d_date_sk
and inv_item_sk=i_item_sk
and inv_warehouse_sk = w_warehouse_sk
and d_month_seq between 1176 and 1176 + 11
group by i_product_name
,i_brand
,i_class
,i_category
order by qoh, i_product_name, i_brand, i_class, i_category
limit 100;
explain analyze select i_product_name
,i_brand
,i_class
,i_category
,avg(inv_quantity_on_hand) qoh
from inventory1
,date_dim
,item
,warehouse
where inv_date_sk=d_date_sk
and inv_item_sk=i_item_sk
and inv_warehouse_sk = w_warehouse_sk
and d_month_seq between 1176 and 1176 + 11
group by i_product_name
,i_brand
,i_class
,i_category
order by qoh, i_product_name, i_brand, i_class, i_category
limit 100;
-- Query 3
explain analyze select dt.d_year
,item.i_brand_id brand_id
,item.i_brand brand
,sum(ss_ext_sales_price) sum_agg
from date_dim dt
,store_sales1
,item
where dt.d_date_sk = store_sales1.ss_sold_date_sk
and store_sales1.ss_item_sk = item.i_item_sk
and item.i_manufact_id = 436
and dt.d_moy=12
group by dt.d_year
,item.i_brand
,item.i_brand_id
order by dt.d_year
,sum_agg desc
,brand_id
limit 100;
-- Query 20
explain analyze select i_item_desc
,i_category
,i_class
,i_current_price
,sum(cs_ext_sales_price) as itemrevenue
,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over
(partition by i_class) as revenueratio
from catalog_sales
,item
,date_dim
where cs_item_sk = i_item_sk
and i_category in ('Jewelry', 'Sports', 'Books')
and cs_sold_date_sk = d_date_sk
and d_date between cast('2001-01-12' as date)
and (cast('2001-01-12' as date) + 30)
group by i_item_id
,i_item_desc
,i_category
,i_class
,i_current_price
order by i_category
,i_class
,i_item_id
,i_item_desc
,revenueratio
limit 100;
-- Query 21
explain analyze select *
from(select w_warehouse_name
,i_item_id
,sum(case when (cast(d_date as date) < cast ('1998-04-08' as date))
then inv_quantity_on_hand
else 0 end) as inv_before
,sum(case when (cast(d_date as date) >= cast ('1998-04-08' as date))
then inv_quantity_on_hand
else 0 end) as inv_after
from inventory1
,warehouse
,item
,date_dim
where i_current_price between 0.99 and 1.49
and i_item_sk = inv_item_sk
and inv_warehouse_sk = w_warehouse_sk
and inv_date_sk = d_date_sk
and d_date between (cast ('1998-04-08' as date) - 30)
and (cast ('1998-04-08' as date) + 30)
group by w_warehouse_name, i_item_id) x
where (case when inv_before > 0
then inv_after / inv_before
else null
end) between 2.0/3.0 and 3.0/2.0
order by w_warehouse_name
,i_item_id
limit 100;
-- Query 13
explain analyze select avg(ss_quantity)
,avg(ss_ext_sales_price)
,avg(ss_ext_wholesale_cost)
,sum(ss_ext_wholesale_cost)
from store_sales
,store
,customer_demographics
,household_demographics
,customer_address
,date_dim
where s_store_sk = ss_store_sk
and ss_sold_date_sk = d_date_sk and d_year = 2001
and((ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'M'
and cd_education_status = '4 yr Degree'
and ss_sales_price between 100.00 and 150.00
and hd_dep_count = 3
)or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'D'
and cd_education_status = 'Primary'
and ss_sales_price between 50.00 and 100.00
and hd_dep_count = 1
) or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'U'
and cd_education_status = 'Advanced Degree'
and ss_sales_price between 150.00 and 200.00
and hd_dep_count = 1
))
and((ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('KY', 'GA', 'NM')
and ss_net_profit between 100 and 200
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('MT', 'OR', 'IN')
and ss_net_profit between 150 and 300
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('WI', 'MO', 'WV')
and ss_net_profit between 50 and 250
))
;
explain analyze select avg(ss_quantity)
,avg(ss_ext_sales_price)
,avg(ss_ext_wholesale_cost)
,sum(ss_ext_wholesale_cost)
from store_sales2
,store
,customer_demographics
,household_demographics
,customer_address
,date_dim
where s_store_sk = ss_store_sk
and ss_sold_date_sk = d_date_sk and d_year = 2001
and((ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'M'
and cd_education_status = '4 yr Degree'
and ss_sales_price between 100.00 and 150.00
and hd_dep_count = 3
)or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'D'
and cd_education_status = 'Primary'
and ss_sales_price between 50.00 and 100.00
and hd_dep_count = 1
) or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'U'
and cd_education_status = 'Advanced Degree'
and ss_sales_price between 150.00 and 200.00
and hd_dep_count = 1
))
and((ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('KY', 'GA', 'NM')
and ss_net_profit between 100 and 200
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('MT', 'OR', 'IN')
and ss_net_profit between 150 and 300
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('WI', 'MO', 'WV')
and ss_net_profit between 50 and 250
))
;<file_sep>/TPC-DS queries.sql
-- Query 13
explain analyze select avg(ss_quantity)
,avg(ss_ext_sales_price)
,avg(ss_ext_wholesale_cost)
,sum(ss_ext_wholesale_cost)
from store_sales
,store
,customer_demographics
,household_demographics
,customer_address
,date_dim
where s_store_sk = ss_store_sk
and ss_sold_date_sk = d_date_sk and d_year = 2001
and((ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'M'
and cd_education_status = '4 yr Degree'
and ss_sales_price between 100.00 and 150.00
and hd_dep_count = 3
)or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'D'
and cd_education_status = 'Primary'
and ss_sales_price between 50.00 and 100.00
and hd_dep_count = 1
) or
(ss_hdemo_sk=hd_demo_sk
and cd_demo_sk = ss_cdemo_sk
and cd_marital_status = 'U'
and cd_education_status = 'Advanced Degree'
and ss_sales_price between 150.00 and 200.00
and hd_dep_count = 1
))
and((ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('KY', 'GA', 'NM')
and ss_net_profit between 100 and 200
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('MT', 'OR', 'IN')
and ss_net_profit between 150 and 300
) or
(ss_addr_sk = ca_address_sk
and ca_country = 'United States'
and ca_state in ('WI', 'MO', 'WV')
and ss_net_profit between 50 and 250
))
;
-- Query 16
explain analyze select
count(distinct cs_order_number) as "order count"
,sum(cs_ext_ship_cost) as "total shipping cost"
,sum(cs_net_profit) as "total net profit"
from
catalog_sales cs1
,date_dim
,customer_address
,call_center
where
d_date between '1999-2-01' and
(cast('1999-2-01' as date) + 60)
and cs1.cs_ship_date_sk = d_date_sk
and cs1.cs_ship_addr_sk = ca_address_sk
and ca_state = 'IL'
and cs1.cs_call_center_sk = cc_call_center_sk
and cc_county in ('Williamson County','Williamson County','Williamson County','Williamson County',
'Williamson County'
)
and exists (select *
from catalog_sales cs2
where cs1.cs_order_number = cs2.cs_order_number
and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk)
and not exists(select *
from catalog_returns cr1
where cs1.cs_order_number = cr1.cr_order_number)
order by count(distinct cs_order_number)
limit 100;
<file_sep>/DBlink-queries.sql
-- Migrating warehouse table.
insert into warehouse select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from warehouse') as t1(
w_warehouse_sk integer,
w_warehouse_id char(16),
w_warehouse_name varchar(20),
w_warehouse_sq_ft integer,
w_street_number char(10),
w_street_name varchar(60),
w_street_type char(15),
w_suite_number char(10),
w_city varchar(60),
w_county varchar(30),
w_state char(2),
w_zip char(10),
w_country varchar(20),
w_gmt_offset numeric(5,2)
);
-- Migrating promotion table.
insert into promotion select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from promotion') as t1(
p_promo_sk integer,
p_promo_id char(16),
p_start_date_sk integer,
p_end_date_sk integer,
p_item_sk integer,
p_cost numeric(15,2),
p_response_target integer,
p_promo_name char(50),
p_channel_dmail char(1),
p_channel_email char(1),
p_channel_catalog char(1),
p_channel_tv char(1),
p_channel_radio char(1),
p_channel_press char(1),
p_channel_event char(1),
p_channel_demo char(1),
p_channel_details varchar(100),
p_purpose char(15),
p_discount_active char(1)
);
-- Migrating item table.
insert into item select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from item') as t1(
i_item_sk integer,
i_item_id char(16),
i_rec_start_date date,
i_rec_end_date date,
i_item_desc varchar(200),
i_current_price numeric(7,2),
i_wholesale_cost numeric(7,2),
i_brand_id integer,
i_brand char(50),
i_class_id integer,
i_class char(50),
i_category_id integer,
i_category char(50),
i_manufact_id integer,
i_manufact char(50),
i_size char(20),
i_formulation char(20),
i_color char(20),
i_units char(10),
i_container char(10),
i_manager_id integer,
i_product_name char(50)
);
-- Migrating customer table.
insert into customer select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from customer') as t1(
c_customer_sk integer,
c_customer_id char(16),
c_current_cdemo_sk integer,
c_current_hdemo_sk integer,
c_current_addr_sk integer,
c_first_shipto_date_sk integer,
c_first_sales_date_sk integer,
c_salutation char(10),
c_first_name char(20),
c_last_name char(30),
c_preferred_cust_flag char(1),
c_birth_day integer,
c_birth_month integer,
c_birth_year integer,
c_birth_country varchar(20),
c_login char(13),
c_email_address char(50),
c_last_review_date char(10)
);
-- Migrating web_sales table.
insert into web_sales select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from web_sales') as t1(
ws_sold_date_sk integer ,
ws_sold_time_sk integer ,
ws_ship_date_sk integer ,
ws_item_sk integer ,
ws_bill_customer_sk integer ,
ws_bill_cdemo_sk integer ,
ws_bill_hdemo_sk integer ,
ws_bill_addr_sk integer ,
ws_ship_customer_sk integer ,
ws_ship_cdemo_sk integer ,
ws_ship_hdemo_sk integer ,
ws_ship_addr_sk integer ,
ws_web_page_sk integer ,
ws_web_site_sk integer ,
ws_ship_mode_sk integer ,
ws_warehouse_sk integer ,
ws_promo_sk integer ,
ws_order_number integer ,
ws_quantity integer ,
ws_wholesale_cost numeric(7,2) ,
ws_list_price numeric(7,2) ,
ws_sales_price numeric(7,2) ,
ws_ext_discount_amt numeric(7,2) ,
ws_ext_sales_price numeric(7,2) ,
ws_ext_wholesale_cost numeric(7,2) ,
ws_ext_list_price numeric(7,2) ,
ws_ext_tax numeric(7,2) ,
ws_coupon_amt numeric(7,2) ,
ws_ext_ship_cost numeric(7,2) ,
ws_net_paid numeric(7,2) ,
ws_net_paid_inc_tax numeric(7,2) ,
ws_net_paid_inc_ship numeric(7,2) ,
ws_net_paid_inc_ship_tax numeric(7,2) ,
ws_net_profit numeric(7,2)
);
-- Migrating catalog_sales table.
insert into catalog_sales select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from catalog_sales') as t1(
cs_sold_date_sk integer ,
cs_sold_time_sk integer ,
cs_ship_date_sk integer ,
cs_bill_customer_sk integer ,
cs_bill_cdemo_sk integer ,
cs_bill_hdemo_sk integer ,
cs_bill_addr_sk integer ,
cs_ship_customer_sk integer ,
cs_ship_cdemo_sk integer ,
cs_ship_hdemo_sk integer ,
cs_ship_addr_sk integer ,
cs_call_center_sk integer ,
cs_catalog_page_sk integer ,
cs_ship_mode_sk integer ,
cs_warehouse_sk integer ,
cs_item_sk integer ,
cs_promo_sk integer ,
cs_order_number integer ,
cs_quantity integer ,
cs_wholesale_cost numeric(7,2) ,
cs_list_price numeric(7,2) ,
cs_sales_price numeric(7,2) ,
cs_ext_discount_amt numeric(7,2) ,
cs_ext_sales_price numeric(7,2) ,
cs_ext_wholesale_cost numeric(7,2) ,
cs_ext_list_price numeric(7,2) ,
cs_ext_tax numeric(7,2) ,
cs_coupon_amt numeric(7,2) ,
cs_ext_ship_cost numeric(7,2) ,
cs_net_paid numeric(7,2) ,
cs_net_paid_inc_tax numeric(7,2) ,
cs_net_paid_inc_ship numeric(7,2) ,
cs_net_paid_inc_ship_tax numeric(7,2) ,
cs_net_profit numeric(7,2)
);
-- Migrating Inventory table.
insert into inventory select * from
dblink('host=gho-admin user=postgres password=<PASSWORD> dbname=postgres',
'select * from inventory') as t1(
inv_date_sk integer,
inv_item_sk integer,
inv_warehouse_sk integer,
inv_quantity_on_hand integer
);
<file_sep>/table_definitions.sql
-- Create table warehouse from TPC-DS.
create table warehouse (
w_warehouse_sk integer,
w_warehouse_id char(16),
w_warehouse_name varchar(20),
w_warehouse_sq_ft integer,
w_street_number char(10),
w_street_name varchar(60),
w_street_type char(15),
w_suite_number char(10),
w_city varchar(60),
w_county varchar(30),
w_state char(2),
w_zip char(10),
w_country varchar(20),
w_gmt_offset numeric(5,2)
);
-- Create table promotion from TPC-DS.
create table promotion (
p_promo_sk integer,
p_promo_id char(16),
p_start_date_sk integer,
p_end_date_sk integer,
p_item_sk integer,
p_cost numeric(15,2),
p_response_target integer,
p_promo_name char(50),
p_channel_dmail char(1),
p_channel_email char(1),
p_channel_catalog char(1),
p_channel_tv char(1),
p_channel_radio char(1),
p_channel_press char(1),
p_channel_event char(1),
p_channel_demo char(1),
p_channel_details varchar(100),
p_purpose char(15),
p_discount_active char(1)
);
-- Create table item from TPC-DS.
create table item (
i_item_sk integer,
i_item_id char(16),
i_rec_start_date date,
i_rec_end_date date,
i_item_desc varchar(200),
i_current_price numeric(7,2),
i_wholesale_cost numeric(7,2),
i_brand_id integer,
i_brand char(50),
i_class_id integer,
i_class char(50),
i_category_id integer,
i_category char(50),
i_manufact_id integer,
i_manufact char(50),
i_size char(20),
i_formulation char(20),
i_color char(20),
i_units char(10),
i_container char(10),
i_manager_id integer,
i_product_name char(50)
);
create table inventory(
inv_date_sk integer,
inv_item_sk integer,
inv_warehouse_sk integer,
inv_quantity_on_hand integer
);
|
bbc0148ec69a56d9a149437c6cdc65cec451c91f
|
[
"SQL"
] | 4 |
SQL
|
ShawnMeng/data-placement
|
ff1a497bbf184703d4479fefab6b65f3471acf12
|
86c322ee3d6ea24e3585c94fb8f317838639848c
|
refs/heads/master
|
<file_sep>echo "Welcome to the guessing game!"
#set variable defining number of files in current directory
#Doesn't include directories
nfiles=$(ls -p | grep -v / | wc -l)
#Function that checks whether response and nfiles match
# check-match GUESS ACTUAL
# Compares the user's guess ($1) against the actual file count ($2), prints
# feedback, and increments the global loop counter `count` on a correct guess.
function check-match {
    if [[ $1 -eq $2 ]]
    then
        echo "Congrats! You guessed correctly."
        # Fix: `count=$count+1` performed string concatenation ("0+1") and
        # only terminated the loop by accident of [[ ]] arithmetic coercion;
        # use real arithmetic expansion instead.
        count=$((count+1))
    elif [[ $1 -gt $2 ]]
    then
        echo "Your guess was too high!"
    elif [[ $1 -lt $2 ]]
    then
        echo "Your guess was too low!"
    fi
}
#initiate while loop
#Will need to keep running the loop until the guess == NFiles
count=0
while [[ $count -lt 1 ]]
do
#Prompt user to enter their guess
echo "Guess the number of files in this directory:"
read response
check-match $response $nfiles
done
<file_sep>README.md:
touch README.md
echo "# Guessing Game" >> README.md
echo "## Unix Workbench Project" >> README.md
echo "" >> README.md
echo "Number of lines in guessing game program:" >> README.md
cat guessinggame.sh | wc -l >> README.md
echo "" >> README.md
echo "make was run on $$(date)" >> README.md
<file_sep># Guessing Game
## Unix Workbench Project
Number of lines in guessing game program:
32
make was run on Sun, May 10, 2020 1:08:47 PM
|
72947365149fef373df55296a6e7339eeda1b133
|
[
"Markdown",
"Makefile",
"Shell"
] | 3 |
Shell
|
jenna-labelle/UnixWorkbench_Project
|
55d90dd46bfa90422a9dbf3c3e38502e918be8ab
|
7f166808e97d76c08f668ef6c330273f6fdb4495
|
refs/heads/master
|
<repo_name>echo1319/kmeans<file_sep>/src/com/constantine/Main.java
/*
package com.constantine;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
// k-means (k = 3) driver: reads configuration and data, then iterates Lloyd's
// algorithm on a single attribute until the three centers stop moving.
public class Main {
// Entry point. Relies on the project-local Extension helper for statistics,
// partitioning and cost computations.
public static void main(String[] args) throws IOException {
// read properties file:
// define datafile file
// define delimeter
// define attributes (by index of the array)
Extension extension = new Extension();
Properties properties = extension.readPropertiesFile("init.properties");
String dataFilename = properties.getProperty("datafile");
String delimeter = properties.getProperty("delimeter");
int classnameIndex = Integer.parseInt(properties.getProperty("classname"));
// Attribute indexes to use, comma-separated in the properties file.
int[] variablesArray = extension.toIntArray(properties.getProperty("variables").split(","));
// File datafile = extension.getFile(dataFilename);
ArrayList<Pattern> patterns = new ArrayList<Pattern>();
// Echo the parsed configuration so runs are reproducible from the log.
System.out.println("-------------------------");
System.out.println(String.format("datafile: %s\ndelimeter: %s\nclassnameIndex: %s", dataFilename, delimeter, classnameIndex));
System.out.print("Variables: ");
for (int i = 0; i < variablesArray.length; i++) {
System.out.print(variablesArray[i] + " ");
}
System.out.println("\n-------------------------\n");
// read data file
// create data set
List<String> dataset = Files.readAllLines(Paths.get(properties.getProperty("datafile")), Charset.defaultCharset());
// System.out.println(dataset);
// Number of clusters. NOTE(review): the K property in init.properties is
// ignored here — confirm whether it should be read instead of hard-coding 3.
int k = 3;
double[] variables0 = new double[dataset.size()];
// double[] pointY = new double[dataset.size()];
int i = 0;
// Extract the first configured attribute from every data line.
for (String line : dataset) {
String[] vars = line.split(delimeter);
variables0[i] = Double.parseDouble(vars[variablesArray[0]]);
// pointY[i] = Double.parseDouble(vars[variablesArray[1]]);
i++;
}
// for (double p : pointX)
// System.out.println(p);
double mean = extension.mean(variables0);
double standardDeviation = extension.standardDeviation(variables0, mean);
// Initial centers: z-scores of the k percentiles mapped back onto the data scale.
double[] percentileZs = extension.percentileZero(k);
double[] xs = extension.xs(percentileZs, mean, standardDeviation);
//array with lists ; each list-> partition points
List<Double>[] partitions;
System.out.println("Mean for patterns' variable 0: " + mean);
System.out.println("Deviation for patterns' variable 0: " + standardDeviation);
System.out.println("Percentiles Zero:");
for (double p : percentileZs)
System.out.println(p);
// Lloyd's algorithm: re-partition the points around the current centers, then
// move each center to its partition mean; stop once no center changes.
while (true) {
System.out.println("\nX points for percentiles:");
for (double x : xs)
System.out.println(x);
// for (List<Double> p : partitions)
// System.out.println(p);
partitions = extension.createPartitions(variables0, xs);
// Convergence check: every center already equals its partition's mean.
if (xs[0] == extension.calculateCenter(extension.toDoubleArray(partitions[0].toArray()))
&& xs[1] == extension.calculateCenter(extension.toDoubleArray(partitions[1].toArray()))
&& xs[2] == extension.calculateCenter(extension.toDoubleArray(partitions[2].toArray())))
break;
//calculate new data centers
System.out.println("\n\nCost1 " + extension.cost(extension.toDoubleArray(partitions[0].toArray()), xs[0]));
System.out.print("1. Old center was " + xs[0] + " ");
xs[0] = extension.calculateCenter(extension.toDoubleArray(partitions[0].toArray()));
System.out.print("but after recalculation new center will be " + xs[0] + "\n");
System.out.println("Cost2 " + extension.cost(extension.toDoubleArray(partitions[1].toArray()), xs[1]));
System.out.print("2. Old center was " + xs[1] + " ");
xs[1] = extension.calculateCenter(extension.toDoubleArray(partitions[1].toArray()));
System.out.print("but after recalculation new center will be " + xs[1] + "\n");
System.out.println("Cost3 " + extension.cost(extension.toDoubleArray(partitions[2].toArray()), xs[2]));
System.out.print("3. Old center was " + xs[2] + " ");
xs[2] = extension.calculateCenter(extension.toDoubleArray(partitions[2].toArray()));
System.out.print("but after recalculation new center will be " + xs[2] + "\n");
}
System.out.println("\n K-Centers stabilized at : " + xs[0] + " , " + xs[1] + " and " + xs[2]);
//For each element in the dataset, chose the closest centroid.
//Make that centroid the element 's label.
}
}
*/
<file_sep>/resources/init.properties
datafile=resources/input.txt
delimeter=,
variables=0,1,2,3
classname=4
K=3<file_sep>/src/com/constantine/utils/Utils.java
package com.constantine.utils;
import com.constantine.model.Pattern;
import org.apache.log4j.Logger;
import java.util.*;
/**
 * Miscellaneous helpers for the k-means implementation: map sorting,
 * formatting, and per-attribute statistics over patterns.
 */
public class Utils {

    private Logger log = Logger.getLogger(this.getClass());

    /**
     * Returns a copy of the given map sorted by value.
     *
     * @param unsortMap map to sort
     * @param order     true for ascending, false for descending
     * @return a LinkedHashMap whose iteration order is the sorted order
     */
    public Map<String, Double> sortByComparator(Map<String, Double> unsortMap, final boolean order) {
        List<Map.Entry<String, Double>> list = new LinkedList<Map.Entry<String, Double>>(unsortMap.entrySet());
        // Sorting the list based on values; direction controlled by 'order'.
        Collections.sort(list, new Comparator<Map.Entry<String, Double>>() {
            public int compare(Map.Entry<String, Double> o1, Map.Entry<String, Double> o2) {
                if (order) {
                    return o1.getValue().compareTo(o2.getValue());
                } else {
                    return o2.getValue().compareTo(o1.getValue());
                }
            }
        });
        // LinkedHashMap preserves insertion (i.e. sorted) order.
        Map<String, Double> sortedMap = new LinkedHashMap<String, Double>();
        for (Map.Entry<String, Double> entry : list) {
            sortedMap.put(entry.getKey(), entry.getValue());
        }
        return sortedMap;
    }

    /**
     * Joins the given values into a single string, each followed by a space.
     */
    public String getString(double[] data) {
        // StringBuilder avoids quadratic cost of repeated String concatenation.
        StringBuilder str = new StringBuilder();
        for (double s : data) {
            str.append(s).append(' ');
        }
        return str.toString();
    }

    /**
     * Fills min/max with the per-attribute minimum and maximum over all patterns.
     * Both output arrays must have one slot per attribute.
     */
    public void get_min_max(double[] min, double max[], List<Pattern> patterns) {
        int attrNum = max.length;
        // Seed with the FIRST pattern (index 0). The original used get(1),
        // which skipped element 0 and threw on single-element lists.
        for (int i = 0; i < attrNum; i++) {
            min[i] = patterns.get(0).getAttribute(i);
            max[i] = patterns.get(0).getAttribute(i);
        }
        for (Pattern p : patterns) {
            for (int i = 0; i < p.getAtrributeNum(); i++) {
                if (p.getAttribute(i) < min[i]) min[i] = p.getAttribute(i);
                if (p.getAttribute(i) > max[i]) max[i] = p.getAttribute(i);
            }
        }
        for (int i = 0; i < attrNum; i++) {
            log.debug(" Max value of attribute " + i + " is " + max[i] + " and Min value is " + min[i]);
        }
    }

    /**
     * Extracts one attribute column from the given patterns.
     */
    public double[] getValues(int attributeNum, List<Pattern> inputPatterns) {
        double[] values = new double[inputPatterns.size()];
        int i = 0;
        for (Pattern p : inputPatterns) {
            values[i] = p.getAttribute(attributeNum);
            i++;
        }
        return values;
    }

    /**
     * Orders keys by their mapped value, descending.
     */
    class ValueComparator implements Comparator<String> {
        Map<String, Double> base;

        public ValueComparator(Map<String, Double> base) {
            this.base = base;
        }

        // Note: this comparator imposes orderings that are inconsistent with equals.
        public int compare(String a, String b) {
            if (base.get(a) >= base.get(b)) {
                return -1;
            } else {
                return 1;
            } // returning 0 would merge keys
        }
    }
}
|
f7e8227f9610e82e04c37a7e94ccd3fc80f65b0c
|
[
"Java",
"INI"
] | 3 |
Java
|
echo1319/kmeans
|
a1ef13ff6bafa10d2ba00e8bf66dc4c4daecbb16
|
eab73d48f61ef2df05924f7caafa887598d39f62
|
refs/heads/master
|
<file_sep><?php
declare(strict_types=1);
namespace App;
use App\Http\Controllers\CacheController;
use Illuminate\Database\Eloquent\Model;
/**
 * A crowd-sourced fuel-availability report for a single fuel station.
 *
 * has_gasoline / has_diesel / has_lpg mirror the FuelStation flags;
 * 'used' marks entries already folded into the station state by push().
 */
class Entry extends Model
{
//
protected $table = 'entries';
// Mass assignment allowed for everything except the primary key.
protected $guarded = ['id'];
// Station this entry reports on (FK column: entries.fuel_station).
public function fuelStation()
{
return $this->hasOne('App\FuelStation', 'id', 'fuel_station');
}
// Entries for the same station with identical flags, from a DIFFERENT ip.
public function scopeRelated($query, $fuel_station, $has_gasoline, $has_diesel, $has_lpg, $ip)
{
return $query->where([['fuel_station','=',$fuel_station],['has_gasoline','=',$has_gasoline],['has_diesel','=',$has_diesel],['has_lpg','=',$has_lpg],['ip','!=',$ip]]);
}
// Entries for the same station with identical flags, regardless of ip.
public function scopeRelatedNoIP($query, $fuel_station, $has_gasoline, $has_diesel, $has_lpg)
{
return $query->where([['fuel_station','=',$fuel_station],['has_gasoline','=',$has_gasoline],['has_diesel','=',$has_diesel],['has_lpg','=',$has_lpg]]);
}
// Entries created within the last hour.
public function scopeLastHour($query)
{
return $query->where('created_at', '>=', \Carbon\Carbon::now()->subHour());
}
// Entries created within the last 24 hours.
public function scopeLastDay($query)
{
return $query->where('created_at', '>=', \Carbon\Carbon::now()->subDay());
}
// Entries not yet applied to a station.
public function scopeNotUsed($query)
{
return $query->where('used', '=', '0');
}
// Entries already applied to a station.
public function scopeUsed($query)
{
return $query->where('used', '=', '1');
}
// Apply this entry: mark all matching entries as used, copy the availability
// flags onto the station, and rebuild the cached station JSON.
public function push()
{
$entries = Entry::relatedNoIP($this->fuel_station, $this->has_gasoline, $this->has_diesel, $this->has_lpg);
$entries->update(['used' => 1]);
// NOTE(review): fuelStation() returns the relation object, so update() runs
// on the relation's query builder — confirm this is intended rather than
// updating the already-loaded model instance.
$fuel_station = $this->fuelStation();
$fuel_station->update(['has_gasoline' => $this->has_gasoline,'has_diesel' => $this->has_diesel, 'has_lpg' => $this->has_lpg]);
$cacheController = new CacheController();
$cacheController->updateStations();
}
}
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\Entry;
use App\FuelStation;
use Carbon\Carbon;
use Illuminate\Support\Facades\Storage;
use Illuminate\Support\Facades\URL;
/**
 * Rebuilds the cached JSON files under storage/data and purges them from the
 * Cloudflare cache so clients always fetch fresh data.
 */
class CacheController extends Controller
{
    /**
     * Purge a single URL from the Cloudflare cache.
     * No-op unless CLOUDFLARE_API_ENABLE is 'true'.
     *
     * @param string $url_to_clear absolute URL to purge
     */
    private function clearCloudflare($url_to_clear)
    {
        // NOTE(review): env() returns null when the Laravel config is cached;
        // consider moving these values into config() — confirm deployment setup.
        if (env('CLOUDFLARE_API_ENABLE', false) == 'true') {
            $path_to_clear = $url_to_clear;
            $auth_email = env('CLOUDFLARE_API_EMAIL');
            $auth_key = env('CLOUDFLARE_API_KEY');
            $api_endpoint = 'https://api.cloudflare.com/client/v4/zones/'.env('CLOUDFLARE_API_IDENTIFIER').'/purge_cache';
            $headers = [
                'X-Auth-Email' => $auth_email,
                'X-Auth-Key' => $auth_key,
                'content-type' => 'application/json',
            ];
            $data = [
                'files' => [$path_to_clear],
            ];
            $client = new \GuzzleHttp\Client();
            $client->request(
                'POST',
                $api_endpoint,
                [
                    'headers' => $headers,
                    'json' => $data,
                ]
            );
            \usleep(500 * 1000); // Prevent hitting Cloudflare Max Requests Limit
        }
    }

    /**
     * Write per-brand station counts to data/stats_brands.json.
     */
    private function updateBrandsStats()
    {
        $brands = FuelStation::brands();
        $brands_stats = [];
        foreach ($brands as $brand) {
            $brands_stats[$brand] = [
                'stations_total' => FuelStation::where([['brand','=',$brand]])->count(),
                'stations_none' => FuelStation::where([['brand','=',$brand]])->empty()->count(),
                'stations_partial' => FuelStation::where([['brand','=',$brand]])->partial()->count(),
                'stations_all' => FuelStation::where([['brand','=',$brand]])->withAll()->count(),
                'stations_no_gasoline' => FuelStation::where([['brand','=',$brand]])->noGasoline()->count(),
                'stations_no_diesel' => FuelStation::where([['brand','=',$brand]])->noDiesel()->count(),
                'stations_no_lpg' => FuelStation::where([['brand','=',$brand]])->noLPG()->count(),
                'stations_sell_gasoline' => FuelStation::where([['brand','=',$brand]])->sellGasoline()->count(),
                'stations_sell_diesel' => FuelStation::where([['brand','=',$brand]])->sellDiesel()->count(),
                'stations_sell_lpg' => FuelStation::where([['brand','=',$brand]])->sellLPG()->count(),
            ];
        }
        Storage::disk('public')->put('data/stats_brands.json', \json_encode($brands_stats));
        $this->clearCloudflare(URL::to('/storage/data/stats_brands.json'));
    }

    /**
     * Write station counts for one county to data/stats_<District>_<county>.json.
     */
    private function updateCounty($district, $county)
    {
        $county_data = [
            'stations_total' => FuelStation::where([['district','=',$district], ['county','=',$county]])->count(),
            'stations_none' => FuelStation::where([['district','=',$district], ['county','=',$county]])->empty()->count(),
            'stations_partial' => FuelStation::where([['district','=',$district], ['county','=',$county]])->partial()->count(),
            'stations_all' => FuelStation::where([['district','=',$district], ['county','=',$county]])->withAll()->count(),
            'stations_no_gasoline' => FuelStation::where([['district','=',$district], ['county','=',$county]])->noGasoline()->count(),
            'stations_no_diesel' => FuelStation::where([['district','=',$district], ['county','=',$county]])->noDiesel()->count(),
            'stations_no_lpg' => FuelStation::where([['district','=',$district], ['county','=',$county]])->noLPG()->count(),
            'stations_sell_gasoline' => FuelStation::where([['district','=',$district], ['county','=',$county]])->sellGasoline()->count(),
            'stations_sell_diesel' => FuelStation::where([['district','=',$district], ['county','=',$county]])->sellDiesel()->count(),
            'stations_sell_lpg' => FuelStation::where([['district','=',$district], ['county','=',$county]])->sellLPG()->count(),
        ];
        Storage::disk('public')->put('data/stats_'.\ucfirst(\mb_strtolower($district)).'_'.$county.'.json', \json_encode($county_data));
        $this->clearCloudflare(URL::to('/storage/data/stats_'.\ucfirst(\mb_strtolower($district)).'_'.$county.'.json'));
    }

    /**
     * Write station counts for one district to data/stats_<District>.json.
     */
    private function updateDistrict($district)
    {
        $district_data = [
            'stations_total' => FuelStation::where([['district','=',$district]])->count(),
            'stations_none' => FuelStation::where([['district','=',$district]])->empty()->count(),
            'stations_partial' => FuelStation::where([['district','=',$district]])->partial()->count(),
            'stations_all' => FuelStation::where([['district','=',$district]])->withAll()->count(),
            'stations_no_gasoline' => FuelStation::where([['district','=',$district]])->noGasoline()->count(),
            'stations_no_diesel' => FuelStation::where([['district','=',$district]])->noDiesel()->count(),
            'stations_no_lpg' => FuelStation::where([['district','=',$district]])->noLPG()->count(),
            'stations_sell_gasoline' => FuelStation::where([['district','=',$district]])->sellGasoline()->count(),
            'stations_sell_diesel' => FuelStation::where([['district','=',$district]])->sellDiesel()->count(),
            'stations_sell_lpg' => FuelStation::where([['district','=',$district]])->sellLPG()->count(),
        ];
        Storage::disk('public')->put('data/stats_'.\ucfirst(\mb_strtolower($district)).'.json', \json_encode($district_data));
        $this->clearCloudflare(URL::to('/storage/data/stats_'.\ucfirst(\mb_strtolower($district)).'.json'));
    }

    /**
     * Rebuild the global, per-district, per-county and per-brand stats files,
     * plus the places index.
     */
    public function updateStats()
    {
        $places = [
        ];
        $entries = [
            'entries_last_hour' => 0,
            'entries_last_day' => 0,
            'entries_total' => 0,
        ];
        $global = [
            'stations_total' => 0,
            'stations_none' => 0,
            'stations_partial' => 0,
            'stations_all' => 0,
            'stations_no_gasoline' => 0,
            'stations_no_diesel' => 0,
            'stations_no_lpg' => 0,
            'stations_sell_gasoline' => 0,
            'stations_sell_diesel' => 0,
            'stations_sell_lpg' => 0,
        ];
        $entries['entries_last_hour'] = Entry::lastHour()->count();
        $entries['entries_last_day'] = Entry::lastDay()->count();
        $entries['entries_total'] = Entry::all()->count();
        $global['stations_total'] = FuelStation::all()->count();
        $global['stations_none'] = FuelStation::empty()->count();
        $global['stations_partial'] = FuelStation::partial()->count();
        $global['stations_all'] = FuelStation::withAll()->count();
        $global['stations_no_gasoline'] = FuelStation::noGasoline()->count();
        $global['stations_no_diesel'] = FuelStation::noDiesel()->count();
        $global['stations_no_lpg'] = FuelStation::noLPG()->count();
        $global['stations_sell_gasoline'] = FuelStation::sellGasoline()->count();
        $global['stations_sell_diesel'] = FuelStation::sellDiesel()->count();
        $global['stations_sell_lpg'] = FuelStation::sellLPG()->count();
        Storage::disk('public')->put('data/stats_entries.json', \json_encode($entries));
        Storage::disk('public')->put('data/stats_global.json', \json_encode($global));
        $this->clearCloudflare(URL::to('/storage/data/stats_entries.json'));
        $this->clearCloudflare(URL::to('/storage/data/stats_global.json'));
        $districts = FuelStation::districts();
        foreach ($districts as $district) {
            if ($district != '') {
                $formated_district = \ucfirst(\mb_strtolower($district));
                $this->updateDistrict($district);
                $counties = FuelStation::counties($district);
                foreach ($counties as $county) {
                    if (! \array_key_exists($formated_district, $places)) {
                        $places[$formated_district] = [];
                    }
                    \array_push($places[$formated_district], $county);
                    $this->updateCounty($district, $county);
                }
            }
        }
        Storage::disk('public')->put('data/places.json', \json_encode($places));
        $this->clearCloudflare(URL::to('/storage/data/places.json'));
        $this->updateBrandsStats();
    }

    /**
     * Rebuild the full station list cache (data/cache.json).
     */
    public function updateStations()
    {
        $json = FuelStation::all('id', 'name', 'brand', 'brand_management', 'long', 'lat', 'repa', 'sell_gasoline', 'sell_diesel', 'sell_lpg', 'has_gasoline', 'has_diesel', 'has_lpg', 'updated_at')->toJson();
        Storage::disk('public')->put('data/cache.json', $json);
        $this->clearCloudflare(URL::to('/storage/data/cache.json'));
    }

    /**
     * Rebuild hourly entry counts from the very first entry until now.
     * (Method name typo kept: it is part of the public interface.)
     */
    public function updateStatsBeginnig()
    {
        $firstDate = Entry::get()->first()->created_at;
        $firstDate = $firstDate->toImmutable();
        $nextDate = $firstDate;
        $nextDate = $nextDate->addHour();
        $entries = [];
        // One bucket per hour between the first entry and now.
        while ($firstDate <= Carbon::now()) {
            $entries[$firstDate->toDateTimeString()] = Entry::where([['created_at', '>', $firstDate],
                ['created_at', '<', $nextDate], ])
                ->count();
            $nextDate = $nextDate->addHour();
            $firstDate = $firstDate->addHour();
        }
        Storage::disk('public')->put('data/stats_entries_hourly.json', \json_encode($entries));
        $this->clearCloudflare(URL::to('/storage/data/stats_entries_hourly.json'));
    }

    /**
     * Rebuild the entry counts for the last 12 hours (data/stats_entries_last12.json).
     */
    public function updateEntriesLast12Hours()
    {
        $total = Entry::count('id');
        $startDate = now()->subHours(12);
        $records = Entry::query()
            ->where('created_at', '>=', $startDate->toDateTimeString())
            ->get()
            ->groupBy(function ($item) {
                // Bug fix: the original grouped by format('h') (12-hour clock),
                // which merged e.g. 03:00 and 15:00 into one bucket while the
                // output label below uses the 24-hour format('H').
                return Carbon::parse($item->created_at)->format('H');
            })
        ;
        $data = ['total' => $total, 'records' => []];
        foreach ($records as $record) {
            $data['records'][] = [$record->first()->created_at->format('H') ,$record->count()];
        }
        Storage::disk('public')->put('data/stats_entries_last12.json', \json_encode($data));
        $this->clearCloudflare(URL::to('/storage/data/stats_entries_last12.json'));
    }
}
<file_sep>## VOST Portugal - Já Não Dá Para Abastecer - Version 2.0
## Project setup
Install dependencies:
```sh
composer install
```
Copy the `.env` file:
```sh
cp .env.example .env
```
Generate an encryption key:
```sh
php artisan key:generate
```
### Database
Execute the migration and seeders:
```sh
php artisan migrate:refresh --seed
```
## Testing
To run the tests, execute:
```sh
vendor/bin/phpunit --dump-xdebug-filter xdebug-filter.php
vendor/bin/phpunit --prepend xdebug-filter.php
```
## Contributing
Contributions are always welcome, but before anything else, make sure you get acquainted with the [CONTRIBUTING](CONTRIBUTING.md) guide.
## Credits
- [VOST Portugal](https://github.com/vostpt)
## License
This project is open source software licensed under the [MIT LICENSE](LICENSE).
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
/**
 * Migration for the 'entries' table: one row per crowd-sourced fuel
 * availability report.
 */
class CreateEntriesTable extends Migration
{
/**
 * Run the migrations.
 *
 * @return void
 */
public function up()
{
Schema::create('entries', function (Blueprint $table) {
$table->bigIncrements('id');
// FK to fuel_stations.id (not enforced at the DB level here).
$table->bigInteger('fuel_station');
$table->boolean('has_gasoline');
$table->boolean('has_diesel');
$table->boolean('has_lpg');
// Set once the entry has been folded into the station state.
$table->boolean('used');
$table->timestamps();
});
}
/**
 * Reverse the migrations.
 *
 * @return void
 */
public function down()
{
Schema::dropIfExists('entries');
}
}
<file_sep>#!/bin/bash
EXIT_STATUS=0
REGEX=".*php$"

#
# Coding style check per PHP file
#
# NUL-delimited iteration (-z / read -d '') so staged paths containing spaces
# or other special characters are not mangled by word splitting.
while IFS= read -r -d '' file; do
    if [[ $file =~ $REGEX ]]; then
        # Quote the path so it is passed as a single argument.
        composer cs-check "$file"
        EXIT_STATUS=$?
        if [ $EXIT_STATUS -ne 0 ]; then
            echo "Issues detected! To fix, execute: composer cs-fix $file"
            exit $EXIT_STATUS
        fi
    fi
done < <(git diff --cached --name-only --diff-filter=ACM -z)

if [ $EXIT_STATUS -eq 0 ]; then
    echo "All good! No coding style issues found :)"
fi
exit $EXIT_STATUS<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\ExternalAuth;
use Illuminate\Http\Request;
use Illuminate\Support\Str;
/**
 * Panel CRUD for external API credentials (key + generated secret + brand).
 */
class ExternalAuthController extends Controller
{
    /**
     * Show the "add external auth" form.
     */
    public function add()
    {
        return response()->view('externalauth/add');
    }

    /**
     * Create a new external auth credential with a random 64-char secret.
     */
    public function create(Request $request)
    {
        $validatedData = $request->validate([
            'key' => 'required',
            'brand' => 'required',
        ]);
        try {
            $ext_auth = new ExternalAuth();
            $ext_auth->key = $validatedData['key'];
            $ext_auth->brand = $validatedData['brand'];
            $ext_auth->secret = Str::random(64);
            $ext_auth->save();
            return redirect('panel/externalauth/list')->with('status', 'External Auth Created!');
        } catch (\Exception $e) {
            // Bug fix: 'Exception' resolved to the non-existent class
            // App\Http\Controllers\Exception, so nothing was ever caught.
            return redirect('panel/externalauth/list')->with('status', 'Error Creating External Auth!');
        }
    }

    /**
     * Show the external auth list page.
     */
    public function list()
    {
        return response()->view('externalauth/list');
    }

    /**
     * Return all credentials as JSON (datatables-style payload).
     */
    public function fetch_all()
    {
        $ext_auths = ExternalAuth::all();
        return response()->json(['data' => $ext_auths]);
    }

    /**
     * Delete a credential by id.
     */
    public function delete(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:external_auth',
        ]);
        try {
            $ext_auth = ExternalAuth::findOrFail($validatedData['id']);
            $ext_auth->delete();
            return redirect('panel/externalauth/list')->with('status', 'Autênticação externa eliminada!');
        } catch (\Exception $e) {
            return redirect('panel/externalauth/list')->with('status', 'Erro ao eliminar autenticação externa!');
        }
    }
}
<file_sep>window.onload = function () {
let data = JSON.parse(Get("/storage/data/stats_brands.json"));
let brandsOfficialData = ['OZ Energia', 'Ecobrent', 'Prio','Bxpress'];
let brandParam = window.findGetParameter('marca');
if(!brandsOfficialData.includes(brandParam)){
brandParam = 'Prio';
}
data = data[brandParam];
renderChartsBrand(data);
document.getElementById('brand_name').textContent = `${brandParam} (Dados Oficiais)`;
};
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\User;
use Auth;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Hash;
/**
 * Panel user management: creation, listing, email verification, deletion
 * and password changes.
 */
class UsersController extends Controller
{
    /**
     * Show the "add user" form.
     */
    public function add()
    {
        return response()->view('users/add');
    }

    /**
     * Show the "change password" form.
     */
    public function password()
    {
        return response()->view('users/password');
    }

    /**
     * Create a new (unverified) user.
     */
    public function create(Request $request)
    {
        $validatedData = $request->validate([
            'name' => 'required',
            'email' => 'required|email|unique:users|max:254',
            // NOTE(review): rule restored from a scrubbed placeholder — confirm
            // the intended password policy.
            'password' => 'required|string|min:8',
        ]);
        try {
            $user = new User();
            $user->name = $validatedData['name'];
            // Always store a bcrypt hash, never the plain-text password.
            $user->password = Hash::make($validatedData['password']);
            $user->email = $validatedData['email'];
            $user->save();
            return redirect('panel/users/list')->with('status', 'User Created!');
        } catch (\Exception $e) {
            // Bug fix: bare 'Exception' resolved to the non-existent
            // App\Http\Controllers\Exception class.
            return redirect('panel/users/list')->with('status', 'Error Creating User!');
        }
    }

    /**
     * Show the user list page.
     */
    public function list()
    {
        return response()->view('users/list');
    }

    /**
     * Return verified users (id/name/email only) as JSON.
     */
    public function fetch_verified()
    {
        $users_final = [];
        $users = User::where('email_verified_at', '<>', 'NULL')->get();
        foreach ($users as $user) {
            $users_final[] = [
                'id' => $user->id,
                'name' => $user->name,
                'email' => $user->email,
            ];
        }
        return response()->json(['data' => $users_final]);
    }

    /**
     * Return users awaiting email verification as JSON.
     */
    public function fetch_not_verified()
    {
        $users_final = [];
        $users = User::whereNull('email_verified_at')->get();
        foreach ($users as $user) {
            $users_final[] = [
                'id' => $user->id,
                'name' => $user->name,
                'email' => $user->email,
            ];
        }
        return response()->json(['data' => $users_final]);
    }

    /**
     * Manually mark a user's email as verified.
     */
    public function verify(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:users',
        ]);
        try {
            $user = User::findOrFail($validatedData['id']);
            $user->markEmailAsVerified();
            return redirect('panel/users/list')->with('status', 'Email de Utilizador validado!');
        } catch (\Exception $e) {
            return redirect('panel/users/list')->with('status', 'Erro ao validar Email de Utilizador!');
        }
    }

    /**
     * Delete a user by id.
     */
    public function delete(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:users',
        ]);
        try {
            $user = User::findOrFail($validatedData['id']);
            $user->delete();
            // Consistency fix: every other action redirects under 'panel/'.
            return redirect('panel/users/list')->with('status', 'Utilizador eliminado!');
        } catch (\Exception $e) {
            return redirect('panel/users/list')->with('status', 'Erro ao eliminar Utilizador!');
        }
    }

    /**
     * Change the authenticated user's password after checking the current one.
     */
    public function updatePassword(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:users',
            // NOTE(review): rules restored from scrubbed placeholders — confirm
            // the intended password policy.
            'current-password' => 'required',
            'password' => 'required|string|min:8',
            'password_confirmation' => 'required|same:password',
        ]);
        try {
            if (Auth::Check()) {
                $current_password = Auth::User()->password;
                if (Hash::check($validatedData['current-password'], $current_password)) {
                    $user = User::findOrFail($validatedData['id']);
                    $user->password = Hash::make($validatedData['password']);
                    $user->save();
                    return back()->with('status', 'Password Atualizada!');
                } else {
                    return back()->with('status', 'Password Atual Errada!');
                }
            } else {
                return back()->with('status', 'Erro ao atualizar password!');
            }
        } catch (\Exception $e) {
            return back()->with('status', 'Erro ao atualizar password!');
        }
    }
}
<file_sep><?php
declare(strict_types=1);
// resources/lang/en/messages.php
// English UI labels, keyed by the identifier used in the Blade templates.
return [
'Map' => 'Map',
'Stats' => 'Stats',
];
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
/**
 * Migration for the 'fuel_stations' table: one row per known station,
 * including location, what it sells and current availability flags.
 */
class CreateFuelStationsTable extends Migration
{
/**
 * Run the migrations.
 *
 * @return void
 */
public function up()
{
Schema::create('fuel_stations', function (Blueprint $table) {
$table->bigIncrements('id');
// Identifier from the upstream data source.
$table->unsignedBigInteger('id_station');
$table->string('name');
$table->string('brand');
$table->string('usage');
$table->string('type');
$table->string('district');
$table->string('county');
$table->string('address');
// Coordinates: 10 total digits, 7 after the decimal point.
$table->decimal('long', 10, 7);
$table->decimal('lat', 10, 7);
$table->boolean('repa');
// What the station sells...
$table->boolean('sell_gasoline');
$table->boolean('sell_diesel');
$table->boolean('sell_lpg');
// ...and what it currently has in stock.
$table->boolean('has_gasoline');
$table->boolean('has_diesel');
$table->boolean('has_lpg');
$table->timestamps();
});
}
/**
 * Reverse the migrations.
 *
 * @return void
 */
public function down()
{
Schema::dropIfExists('fuel_stations');
}
}
<file_sep><?php
declare(strict_types=1);
// PHP CS Fixer configuration: PSR-2/PSR-4 base plus stricter, mostly
// cosmetic rules (imports, casts, quotes, trailing commas, alignment).
return PhpCsFixer\Config::create()
->setRules([
'@PSR2' => true,
'psr4' => true,
'strict_param' => true,
'declare_strict_types' => true,
'fully_qualified_strict_types' => true,
'single_quote' => true,
'linebreak_after_opening_tag' => true,
'logical_operators' => true,
'lowercase_cast' => true,
'short_scalar_cast' => true,
'no_whitespace_in_blank_line' => true,
'no_unused_imports' => true,
'combine_consecutive_issets' => true,
'not_operator_with_successor_space' => true,
'combine_consecutive_unsets' => true,
'native_function_casing' => true,
'native_function_invocation' => true,
'no_alias_functions' => true,
'trailing_comma_in_multiline_array' => true,
'mb_str_functions' => true,
'ordered_imports' => [
'sort_algorithm' => 'alpha',
],
'return_type_declaration' => [
'space_before' => 'none',
],
'class_attributes_separation' => [
'elements' => [
'method',
'property',
],
],
'binary_operator_spaces' => [
'default' => 'align_single_space_minimal',
],
'array_syntax' => [
'syntax' => 'short',
],
'concat_space' => [
'spacing' => 'none',
],
]);<file_sep><?php
declare(strict_types=1);
namespace App\Console\Commands;
use App\FuelStation;
use Illuminate\Console\Command;
class FuelStationsGetDistrictFromCounties extends Command
{
/**
 * The name and signature of the console command.
 *
 * @var string
 */
protected $signature = 'stations:districts';
/**
 * The console command description.
 *
 * @var string
 */
protected $description = 'Get Fuel Stations District from County';
/**
 * Create a new command instance.
 *
 * @return void
 */
public function __construct()
{
parent::__construct();
}
/**
 * Execute the console command.
 *
 * Fills in the 'district' column of stations that have a county but no
 * district, by resolving the county through the VOST counties API.
 * Known misspelled county names are corrected inline first.
 *
 * @return mixed
 */
public function handle()
{
$fuel_stations = FuelStation::all();
$num_updated = 0;
$num_not_found = 0;
foreach ($fuel_stations as $fuel_station) {
$county = $fuel_station->county;
if ($county != '') {
if ($fuel_station->district == '') {
// Fix known bad county spellings coming from the source data.
if ($county == 'Guimares') {
$county = 'Guimarães';
}
if ($county == 'Melgao') {
$county = 'Melgaço';
}
if ($county == 'Ofir') {
$county = 'Esposende';
}
if ($county == 'Taipas') {
$county = 'Guimarães';
}
if ($county == 'Açores') {
// Açores is its own district; no API lookup needed.
$district = 'Açores';
} else {
// NOTE(review): file_get_contents returns false on failure and the
// array accesses below assume a non-empty result — an unknown county
// or network error will raise a runtime error here; consider guarding.
$json_id = \file_get_contents('https://api.vost.pt/v1/counties?search='.\urlencode($county));
$obj_id = \json_decode($json_id, true);
echo($county);
$county_id = $obj_id['data'][0]['id'];
$json_district = \file_get_contents('https://api.vost.pt/v1/counties/'.$county_id);
$obj_district = \json_decode($json_district, true);
$district = $obj_district['included'][0]['attributes']['name'];
}
$fuel_station->update(['district' => $district]);
$num_updated++;
// Throttle requests against the external API.
\usleep(35000);
}
} else {
$num_not_found++;
}
}
echo('Updated: '.$num_updated.'; Not Found: '.$num_not_found);
}
}
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
/**
 * Serves the static chart pages; the data itself is fetched client-side
 * from the cached JSON files in storage.
 */
class GraphsController extends Controller
{
// Global stats dashboard.
public function stats()
{
return response()->view('graphs/stats');
}
// Per-brand charts.
public function brands()
{
return response()->view('graphs/brands');
}
// Entry-volume charts.
public function entries()
{
return response()->view('graphs/entries');
}
}
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
/**
 * Data migration: seeds the default rows of the 'options' table.
 */
class CreateOptionsValues extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        DB::table('options')->insert(
            [
                'name' => 'num_entries_required',
                'value' => '10',
                'description' => 'Número de entradas necessárias para validação automática',
            ]
        );
        DB::table('options')->insert(
            [
                'name' => 'stress_lockdown',
                'value' => 0,
                'description' => 'Bloquear acesso a partes com processamento elevado',
            ]
        );
    }

    /**
     * Reverse the migrations.
     *
     * Bug fix: the original truncated the whole 'options' table, destroying
     * rows inserted by other migrations or at runtime. Only remove the rows
     * this migration created.
     *
     * @return void
     */
    public function down()
    {
        DB::table('options')
            ->whereIn('name', ['num_entries_required', 'stress_lockdown'])
            ->delete();
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Console\Commands;
use App\FuelStation;
use Illuminate\Console\Command;
class FuelStationsUpdateFromURL2 extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'stations:updateFromURL:2';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Parse Fuel Stations from URL (Source #2)';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * str_getcsv wrapper for tab-separated lines.
     * Note: $delimiter/$enclosure/$escape are kept for signature compatibility
     * but are not forwarded — the tab separator is hard-coded.
     */
    private function str_gettsv($input, $delimiter = "\t", $enclosure = '"', $escape = '\\')
    {
        return \str_getcsv($input, "\t");
    }

    /**
     * Import/refresh Spanish fuel stations from the TSV feed configured in
     * FUELSTATIONS_SOURCE2. Existing stations (matched by source_id) are
     * updated; new ones are created with every sold fuel marked available.
     *
     * @return mixed
     */
    public function handle()
    {
        $tsv = \file_get_contents(env('FUELSTATIONS_SOURCE2'));
        $fetched_object = \array_map([$this, 'str_gettsv'], \explode("\n", $tsv));
        // Drop the header row.
        unset($fetched_object[0]);
        $num_updated = 0;
        $num_created = 0;
        foreach ($fetched_object as $key => $entry) {
            $repa = '';
            $source_id = 'spain-'.$key;
            $brand = 'POSTO ES';
            $name = $entry[20];
            $lpg = false;
            $long = $entry[7];
            $lat = $entry[6];
            $county = $entry[1];
            $district = $entry[0];
            $data = [
                'repa' => $repa,
                'source_id' => $source_id,
                'brand' => $brand,
                'name' => $name,
                'sell_gasoline' => true,
                'sell_diesel' => true,
                'sell_lpg' => $lpg,
                // The feed uses '.' as thousands separator and ',' as decimal mark.
                'long' => \floatval(\str_replace(',', '.', \str_replace('.', '', $long))),
                'lat' => \floatval(\str_replace(',', '.', \str_replace('.', '', $lat))),
                'county' => $county,
                'district' => $district,
            ];
            // Bug fix: the value was wrapped in an array ([$source_id]), which does
            // not match where($column, $value) and produced a wrong comparison.
            $fuel_station = FuelStation::where('source_id', $source_id)->get()->first();
            if ($fuel_station) {
                $fuel_station->update($data);
                $num_updated++;
            } else {
                $data['has_gasoline'] = true;
                $data['has_diesel'] = true;
                $data['has_lpg'] = $lpg;
                $fuel_station = new FuelStation();
                $fuel_station->fill($data);
                $fuel_station->save();
                $num_created++;
            }
        }
        echo('Created: '.$num_created.'; Updated: '.$num_updated);
    }
}
<file_sep># Contributing
First and foremost, we appreciate your interest in this project. This document contains essential information, should you want to contribute.
## Development discussion
For bugs, new features or improvements, open a new [issue](https://github.com/vostpt/api/issues/new).
## Which Branch?
Pull requests should always be done against the `master` branch.
## Coding Style
This project follows the [PSR-2](https://www.php-fig.org/psr/psr-2/) coding style guide and the [PSR-4](https://www.php-fig.org/psr/psr-4/) autoloader standard.
### PHP Coding Standards Fixer
A [PHP CS Fixer](https://cs.symfony.com/) script is hooked into the CI pipeline, so you'll be notified of any coding standard issue when pushing code.
#### Check
On each build, the `composer cs-check` script is executed to make sure the coding standards are followed.
#### Fix
If the build breaks due to coding standards, the following command fixes the issues:
```sh
composer cs-fix <file or directory name>
```
#### Pre-Commit Hook installation
To run the coding style check before each commit, install the bundled script in the project root with the following command:
```sh
cp pre-commit.sh .git/hooks/pre-commit
```
This prevents code from being committed if the check fails.
### PHPDoc
The following is a valid documentation block example:
```php
/**
* Index Occurrences.
*
* @param Index $request
* @param OccurrenceFilter $filter
* @param OccurrenceRepository $occurrenceRepository
*
* @throws \InvalidArgumentException
* @throws \OutOfBoundsException
* @throws \RuntimeException
*
* @return \Illuminate\Http\JsonResponse
*/
public function index(Index $request, OccurrenceFilter $filter, OccurrenceRepository $occurrenceRepository): JsonResponse
{
// ...
}
```
## Committing to git
Each commit **MUST** have a proper message describing the work that has been done.
This is called [Semantic Commit Messages](https://seesparkbox.com/foundry/semantic_commit_messages).
Here's what a commit message should look like:
```txt
feat(Occurrences): implement API client to fetch occurrence data
^--^ ^---------^ ^-------------------------------------------^
| | |
| | +-> Description of the work in the present tense.
| |
| +---------------> Scope of the work.
|
+--------------------> Type: chore, docs, feat, fix, hack, refactor, style, or test.
```
## Branching strategy
We will be using the **branch-per-issue** workflow.
This means, that for each open [issue](https://github.com/vostpt/api/issues), we'll create a corresponding **git** branch.
For instance, issue `#123` should have a corresponding `API-123/ShortTaskDescription` branch, which **MUST** branch off the latest code in `master`.
<file_sep><?php
declare(strict_types=1);
// Portuguese translations for Laravel's password-broker messages.
// Fixes user-facing typos: missing accents and 'nenhuma utilizado'.
return [
    /*
    |--------------------------------------------------------------------------
    | Password Reset Language Lines
    |--------------------------------------------------------------------------
    |
    | The following language lines are the default lines which match reasons
    | that are given by the password broker for a password update attempt
    | has failed, such as for an invalid token or invalid new password.
    |
    */
    'password' => 'As Passwords têm de ter no mínimo seis caracteres e serem iguais.',
    'reset' => 'Foi dado reset à tua password!',
    'sent' => 'Enviamos um email com o link para dares reset à tua password!',
    'token' => 'Este token é inválido.',
    'user' => 'Não encontramos nenhum utilizador com esse email.',
];
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\ExternalAuth;
use App\FuelStation;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Input;
class APIController extends Controller
{
    /**
     * Render the public API landing page.
     *
     * @return \Illuminate\Http\Response
     */
    public function home()
    {
        return response()->view('api/home');
    }

    /**
     * Return the brand bound to a key/secret credential pair.
     *
     * Responds with {"brand": "..."} when the credentials match a row in
     * external_auths; otherwise responds with an empty JSON array.
     */
    public function info(Request $request)
    {
        if ($request->has('key') && $request->has('secret')) {
            $ext_auth = ExternalAuth::where([['key', '=', $request->input('key')],['secret', '=', $request->input('secret')]]);
            if ($ext_auth->count() > 0) {
                $ext_auth = $ext_auth->first();
                $info = ['brand' => $ext_auth->brand];
                return response()->json($info);
            }
        }
        // Missing or invalid credentials: empty payload, still HTTP 200.
        return response()->json([]);
    }

    /**
     * List the fuel stations visible to the authenticated brand.
     *
     * READONLY/WRITEREAD credentials see every station; any other brand sees
     * only stations it owns ('brand') or manages ('brand_management').
     * Returns an empty JSON array when credentials are missing or invalid.
     */
    public function fetch(Request $request)
    {
        if ($request->has('key') && $request->has('secret')) {
            $ext_auth = ExternalAuth::where([['key', '=', $request->input('key')],['secret', '=', $request->input('secret')]]);
            if ($ext_auth->count() > 0) {
                $ext_auth = $ext_auth->first();
                $stations = [];
                if ($ext_auth->brand == 'READONLY' || $ext_auth->brand == 'WRITEREAD') {
                    $stations = FuelStation::all();
                } else {
                    $stations = FuelStation::where('brand', '=', $ext_auth->brand)->orWhere('brand_management', '=', $ext_auth->brand)->get();
                }
                $output = [];
                foreach ($stations as $station) {
                    $output[] = [
                        'id' => $station->id,
                        'name' => $station->name,
                        'brand' => $station->brand,
                        'sell_gasoline' => $station->sell_gasoline,
                        'sell_diesel' => $station->sell_diesel,
                        'sell_lpg' => $station->sell_lpg,
                        'has_gasoline' => $station->has_gasoline,
                        'has_diesel' => $station->has_diesel,
                        'has_lpg' => $station->has_lpg,
                        // NOTE(review): lat/long are deliberately swapped on read.
                        // add()/change() below store the request's 'lat' into the
                        // 'long' column and vice versa, so this swap restores the
                        // caller's orientation. Confirm before "fixing".
                        'lat' => $station->long,
                        'long' => $station->lat,
                        'repa' => $station->repa,
                        'county' => $station->county,
                        'district' => $station->district,
                    ];
                }
                return response()->json($output);
            }
        }
        return response()->json([]);
    }

    /**
     * Update the fuel availability flags of one station.
     *
     * Requires key/secret credentials plus id, has_gasoline, has_diesel and
     * has_lpg. Only the owning brand, the managing brand, or a WRITEREAD
     * credential may write. Responds {"success": 1} on success, otherwise
     * {"success": 0}.
     */
    public function push(Request $request)
    {
        $output = ['success' => 0];
        if ($request->has('key') && $request->has('secret') && $request->has('has_gasoline') && $request->has('has_diesel') && $request->has('has_lpg') && $request->has('id')) {
            $ext_auth = ExternalAuth::where([['key', '=', $request->input('key')],['secret', '=', $request->input('secret')]]);
            if ($ext_auth->count() > 0) {
                $ext_auth = $ext_auth->first();
                $fuel_station = FuelStation::where('id', '=', $request->input('id'));
                if ($fuel_station->count() > 0) {
                    $fuel_station = $fuel_station->first();
                    if (($fuel_station->brand == $ext_auth->brand) || ($fuel_station->brand_management == $ext_auth->brand) || ($ext_auth->brand == 'WRITEREAD')) {
                        $has_gasoline = \intval($request->input('has_gasoline'));
                        $has_diesel = \intval($request->input('has_diesel'));
                        $has_lpg = \intval($request->input('has_lpg'));
                        // NOTE(review): \intval() always returns an int, so
                        // \is_nan() on these values is always false — this guard
                        // never rejects anything.
                        if (! \is_nan($has_gasoline) && ! \is_nan($has_diesel) && ! \is_nan($has_lpg)) {
                            // Clamp each flag to the boolean range 0..1.
                            if ($has_gasoline > 1) {
                                $has_gasoline = 1;
                            } elseif ($has_gasoline < 0) {
                                $has_gasoline = 0;
                            }
                            if ($has_diesel > 1) {
                                $has_diesel = 1;
                            } elseif ($has_diesel < 0) {
                                $has_diesel = 0;
                            }
                            if ($has_lpg > 1) {
                                $has_lpg = 1;
                            } elseif ($has_lpg < 0) {
                                $has_lpg = 0;
                            }
                            $fuel_station->update([
                                'has_gasoline' => $has_gasoline,
                                'has_diesel' => $has_diesel,
                                'has_lpg' => $has_lpg,
                            ]);
                            $fuel_station->save();
                            // Rebuild the cached station data consumed by the site.
                            $cacheController = new CacheController();
                            $cacheController->updateStations();
                            $output = ['success' => 1];
                        }
                    }
                }
            }
        }
        return $this->jsonOrOutput($output ?? null) ?? response()->json($output);
    }

    /**
     * Create a new fuel station owned (or managed) by the caller's brand.
     *
     * When the submitted brand differs from the credential's brand (and the
     * credential is not WRITEREAD), the credential's brand is recorded as
     * 'brand_management'. Responds {"success": 0|1}.
     */
    public function add(Request $request)
    {
        $output = ['success' => 0];
        if ($request->has('key') && $request->has('secret') && $request->has('name') && $request->has('brand') && $request->has('repa') && $request->has('sell_gasoline') && $request->has('sell_diesel') && $request->has('sell_lpg') && $request->has('county') && $request->has('district') && $request->has('lat') && $request->has('long')) {
            $ext_auth = ExternalAuth::where([['key', '=', $request->input('key')],['secret', '=', $request->input('secret')]]);
            if ($ext_auth->count() > 0) {
                $ext_auth = $ext_auth->first();
                $sell_gasoline = \intval($request->input('sell_gasoline'));
                $sell_diesel = \intval($request->input('sell_diesel'));
                $sell_lpg = \intval($request->input('sell_lpg'));
                // NOTE(review): always true — see push() above.
                if (! \is_nan($sell_gasoline) && ! \is_nan($sell_diesel) && ! \is_nan($sell_lpg)) {
                    $brand = $request->input('brand');
                    $management_brand = '';
                    if ($brand != $ext_auth->brand && $ext_auth->brand != 'WRITEREAD') {
                        $management_brand = $ext_auth->brand;
                    }
                    // Clamp each flag to the boolean range 0..1.
                    if ($sell_gasoline > 1) {
                        $sell_gasoline = 1;
                    } elseif ($sell_gasoline < 0) {
                        $sell_gasoline = 0;
                    }
                    if ($sell_diesel > 1) {
                        $sell_diesel = 1;
                    } elseif ($sell_diesel < 0) {
                        $sell_diesel = 0;
                    }
                    if ($sell_lpg > 1) {
                        $sell_lpg = 1;
                    } elseif ($sell_lpg < 0) {
                        $sell_lpg = 0;
                    }
                    // Optional text fields default to the empty string.
                    $county = $request->input('county');
                    if ($county == null) {
                        $county = '';
                    }
                    $district = $request->input('district');
                    if ($district == null) {
                        $district = '';
                    }
                    $repa = $request->input('repa');
                    if ($repa == null) {
                        $repa = '';
                    }
                    $data = [
                        'repa' => $repa,
                        'source_id' => 'api',
                        'brand' => $request->input('brand'),
                        'brand_management' => $management_brand,
                        'name' => $request->input('name'),
                        'sell_gasoline' => $sell_gasoline,
                        'sell_diesel' => $sell_diesel,
                        'sell_lpg' => $sell_lpg,
                        // New stations start with stock for everything they sell.
                        'has_gasoline' => $sell_gasoline,
                        'has_diesel' => $sell_diesel,
                        'has_lpg' => $sell_lpg,
                        // NOTE(review): request lat/long are stored swapped —
                        // fetch() above swaps them back on read.
                        'long' => \floatval($request->input('lat')),
                        'lat' => \floatval($request->input('long')),
                        'county' => $county,
                        'district' => $district,
                    ];
                    $fuel_station = new FuelStation();
                    $fuel_station->fill($data);
                    $fuel_station->save();
                    $cacheController = new CacheController();
                    $cacheController->updateStations();
                    $output = ['success' => 1];
                }
            }
        }
        return response()->json($output);
    }

    /**
     * Overwrite an existing station's data (same rules as add(), plus the
     * brand/management/WRITEREAD write-permission check from push()).
     * Responds {"success": 0|1}.
     */
    public function change(Request $request)
    {
        $output = ['success' => 0];
        if ($request->has('key') && $request->has('secret') && $request->has('name') && $request->has('brand') && $request->has('repa') && $request->has('sell_gasoline') && $request->has('sell_diesel') && $request->has('sell_lpg') && $request->has('id') && $request->has('county') && $request->has('district') && $request->has('lat') && $request->has('long')) {
            $ext_auth = ExternalAuth::where([['key', '=', $request->input('key')],['secret', '=', $request->input('secret')]]);
            if ($ext_auth->count() > 0) {
                $ext_auth = $ext_auth->first();
                $fuel_station = FuelStation::where('id', '=', $request->input('id'));
                if ($fuel_station->count() > 0) {
                    $fuel_station = $fuel_station->first();
                    if (($fuel_station->brand == $ext_auth->brand) || ($fuel_station->brand_management == $ext_auth->brand) || ($ext_auth->brand == 'WRITEREAD')) {
                        $brand = $request->input('brand');
                        $management_brand = '';
                        if ($brand != $ext_auth->brand && $ext_auth->brand != 'WRITEREAD') {
                            $management_brand = $ext_auth->brand;
                        }
                        $sell_gasoline = \intval($request->input('sell_gasoline'));
                        $sell_diesel = \intval($request->input('sell_diesel'));
                        $sell_lpg = \intval($request->input('sell_lpg'));
                        // NOTE(review): always true — see push() above.
                        if (! \is_nan($sell_gasoline) && ! \is_nan($sell_diesel) && ! \is_nan($sell_lpg)) {
                            // Clamp each flag to the boolean range 0..1.
                            if ($sell_gasoline > 1) {
                                $sell_gasoline = 1;
                            } elseif ($sell_gasoline < 0) {
                                $sell_gasoline = 0;
                            }
                            if ($sell_diesel > 1) {
                                $sell_diesel = 1;
                            } elseif ($sell_diesel < 0) {
                                $sell_diesel = 0;
                            }
                            if ($sell_lpg > 1) {
                                $sell_lpg = 1;
                            } elseif ($sell_lpg < 0) {
                                $sell_lpg = 0;
                            }
                            $county = $request->input('county');
                            if ($county == null) {
                                $county = '';
                            }
                            $district = $request->input('district');
                            if ($district == null) {
                                $district = '';
                            }
                            $repa = $request->input('repa');
                            if ($repa == null) {
                                $repa = '';
                            }
                            $data = [
                                'repa' => $repa,
                                'source_id' => 'api',
                                'brand' => $request->input('brand'),
                                'brand_management' => $management_brand,
                                'name' => $request->input('name'),
                                'sell_gasoline' => $sell_gasoline,
                                'sell_diesel' => $sell_diesel,
                                'sell_lpg' => $sell_lpg,
                                // Editing resets availability to "in stock" for
                                // everything the station sells.
                                'has_gasoline' => $sell_gasoline,
                                'has_diesel' => $sell_diesel,
                                'has_lpg' => $sell_lpg,
                                // NOTE(review): stored swapped — see fetch()/add().
                                'long' => \floatval($request->input('lat')),
                                'lat' => \floatval($request->input('long')),
                                'county' => $county,
                                'district' => $district,
                            ];
                            $fuel_station->fill($data);
                            $fuel_station->save();
                            $cacheController = new CacheController();
                            $cacheController->updateStations();
                            $output = ['success' => 1];
                        }
                    }
                }
            }
        }
        return response()->json($output);
    }

    /**
     * Stream every station (except the 'POSTO ES' brand) as a UTF-8 CSV
     * download, with lat/long swapped back to caller orientation.
     */
    public function fetch_csv()
    {
        $headers = [
            'Content-type' => 'text/csv',
            'Content-Disposition' => 'attachment; filename=file.csv',
            'Pragma' => 'no-cache',
            'Cache-Control' => 'must-revalidate, post-check=0, pre-check=0',
            'Expires' => '0',
        ];
        $columns = [
            'id',
            'name',
            'brand',
            'lat',
            'long',
            'repa',
            'sell_gasoline',
            'sell_diesel',
            'sell_lpg',
            'has_gasoline',
            'has_diesel',
            'has_lpg',
            'district',
            'county',
            'ense_id',
            'ense_gasoline',
            'ense_diesel',
        ];
        $fuel_stations = FuelStation::all($columns);
        $cb = function () use ($fuel_stations, $columns) {
            $file = \fopen('php://output', 'w');
            // Emit a UTF-8 BOM so Excel opens the file with the right encoding.
            \fprintf($file, \chr(0xEF).\chr(0xBB).\chr(0xBF));
            \fputcsv($file, $columns);
            foreach ($fuel_stations as $fuel_station) {
                if ($fuel_station['brand'] != 'POSTO ES') {
                    // Swap the columns back — they are stored reversed (see add()).
                    $long_save = $fuel_station['long'];
                    $fuel_station['long'] = $fuel_station['lat'];
                    $fuel_station['lat'] = $long_save;
                    \fputcsv($file, $fuel_station->toArray());
                }
            }
            \fclose($file);
        };
        return response()->stream($cb, 200, $headers);
    }

    /**
     * Same CSV stream as fetch_csv(), but restricted to stations whose
     * source_id is 'api' and gated by a shared password.
     */
    public function fetch_api_csv()
    {
        // NOTE(review): Input::get was removed in newer Laravel versions and
        // env() returns null when config is cached — confirm both on upgrade.
        $password = Input::get('password');
        if ($password != env('API_V1_DOWNLOAD_API_CSV')) {
            return response('0', 200)->header('Content-Type', 'text/plain');
        }
        $headers = [
            'Content-type' => 'text/csv',
            'Content-Disposition' => 'attachment; filename=file.csv',
            'Pragma' => 'no-cache',
            'Cache-Control' => 'must-revalidate, post-check=0, pre-check=0',
            'Expires' => '0',
        ];
        $columns = [
            'id',
            'name',
            'brand',
            'lat',
            'long',
            'repa',
            'sell_gasoline',
            'sell_diesel',
            'sell_lpg',
            'has_gasoline',
            'has_diesel',
            'has_lpg',
            'district',
            'county',
            'source_id',
        ];
        // NOTE(review): all() loads every row, then ->where filters the
        // Collection in PHP — not a SQL WHERE clause.
        $fuel_stations = FuelStation::all($columns)->where('source_id', '=', 'api');
        $cb = function () use ($fuel_stations, $columns) {
            $file = \fopen('php://output', 'w');
            // Emit a UTF-8 BOM so Excel opens the file with the right encoding.
            \fprintf($file, \chr(0xEF).\chr(0xBB).\chr(0xBF));
            \fputcsv($file, $columns);
            foreach ($fuel_stations as $fuel_station) {
                if ($fuel_station['brand'] != 'POSTO ES') {
                    // Swap the columns back — they are stored reversed (see add()).
                    $long_save = $fuel_station['long'];
                    $fuel_station['long'] = $fuel_station['lat'];
                    $fuel_station['lat'] = $long_save;
                    \fputcsv($file, $fuel_station->toArray());
                }
            }
            \fclose($file);
        };
        return response()->stream($cb, 200, $headers);
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Console\Commands;
use App\FuelStation;
use Illuminate\Console\Command;
class MapEnseIds extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'stations:map-ense-ids';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'map ense id to fuel_stations table ids';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Download the vost_id -> ense_id mapping and store each ense_id on the
     * matching fuel station.
     *
     * Fix: bail out cleanly when the download fails or the payload is not the
     * expected {"ense": [...]} structure — previously a failed fetch produced
     * a foreach() over null.
     *
     * @return mixed
     */
    public function handle()
    {
        $json = \file_get_contents('https://pastebin.com/raw/UgtUMMB2');
        if ($json === false) {
            // Network/remote failure — nothing to map.
            return;
        }
        $obj = \json_decode($json, true);
        if (! \is_array($obj) || ! isset($obj['ense']) || ! \is_array($obj['ense'])) {
            // Malformed payload — nothing to map.
            return;
        }
        foreach ($obj['ense'] as $point) {
            $fuel_station = FuelStation::where('id', '=', $point['vost_id'])->get()->first();
            if ($fuel_station) {
                $data = [
                    'ense_id' => $point['ense_id'],
                ];
                $fuel_station->fill($data);
                $fuel_station->save();
            }
        }
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Console\Commands;
use App\FuelStation;
use Illuminate\Console\Command;
class FuelStationsUpdateFromENSE extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'stations:ense';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Get information for REPA from ENSE';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Pull ENSE availability volumes from the local scraper service and copy
     * them onto the matching fuel stations.
     *
     * Fix: guard against a failed download or non-array JSON — previously a
     * failed fetch produced a foreach() over null.
     *
     * @return mixed
     */
    public function handle()
    {
        $json = \file_get_contents('http://localhost:3000');
        if ($json === false) {
            // Scraper unreachable — leave existing data untouched.
            return;
        }
        $obj = \json_decode($json, true);
        if (! \is_array($obj)) {
            // Malformed payload — leave existing data untouched.
            return;
        }
        foreach ($obj as $ense_station) {
            // NOTE(review): this compares the ense_id column against the whole
            // $ense_station array; it likely should use the station's ENSE
            // identifier field. Kept as-is — the correct key name is not
            // visible here. TODO confirm against the scraper's schema.
            $fuel_station = FuelStation::where('ense_id', [$ense_station])->get()->first();
            if ($fuel_station) {
                $data = [
                    'ense_gasoline' => $ense_station['Volume disponível: Gasolinas (Última atualização)'],
                    'ense_diesel' => $ense_station['Volume disponível: Gasóleos (Última atualização)'],
                ];
                $fuel_station->fill($data);
                $fuel_station->save();
            }
        }
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\Entry;
use App\FuelStation;
use Auth;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
class EntriesController extends Controller
{
    /**
     * Render the panel page listing pending crowd-sourced entries.
     */
    public function list()
    {
        return response()->view('entries/list');
    }

    /**
     * Accept a crowd-sourced availability report for a station.
     *
     * NOTE: the endpoint is currently disabled — the unconditional return on
     * the first line short-circuits everything below it. Remove that line to
     * re-enable submissions.
     *
     * Fix: `catch (Exception $e)` resolved to the non-existent class
     * App\Http\Controllers\Exception inside this namespace, so the catch
     * never matched and exceptions escaped; it now catches \Exception.
     */
    public function add(Request $request)
    {
        // Deliberate kill-switch: reject every submission.
        return response()->json(['success' => 0]);
        $validatedData = $request->validate([
            'fuel_station' => 'required|exists:fuel_stations,id',
            'gasoline' => 'required|boolean',
            'diesel' => 'required|boolean',
            'lpg' => 'required|boolean',
            'captcha' => 'required|string',
        ]);
        try {
            // Verify the reCAPTCHA v3 token server-side.
            $url = 'https://www.google.com/recaptcha/api/siteverify';
            $data = [
                'secret' => env('GOOGLE_RECAPTCHA_V3_SECRET'),
                'response' => $validatedData['captcha'],
            ];
            $options = [
                'http' => [
                    'header' => "Content-type: application/x-www-form-urlencoded\r\n",
                    'method' => 'POST',
                    'content' => \http_build_query($data),
                ],
            ];
            $context = \stream_context_create($options);
            $resultJSON = \file_get_contents($url, false, $context);
            $result = \json_decode($resultJSON);
            if ($result->success != true) {
                return response()->json(['success' => 0]);
            }
            $station = FuelStation::findOrFail($validatedData['fuel_station']);
            // These brands push their own data — ignore public reports for them.
            if ($station->brand == 'Prio' || $station->brand == 'OZ Energia' || $station->brand == 'Ecobrent' || $station->brand == 'Bxpress' || $station->brand == 'Tfuel') {
                return response()->json(['success' => 0]);
            }
            $data = [
                'has_gasoline' => $validatedData['gasoline'],
                'has_diesel' => $validatedData['diesel'],
                'has_lpg' => $validatedData['lpg'],
            ];
            if ($user = Auth::user()) {
                // Trusted (logged-in) reports apply immediately.
                $station->fill($data);
                $station->save();
                $cacheController = new CacheController;
                $cacheController->updateStations();
                return response()->json(['success' => 1]);
            } else {
                // Anonymous reports are queued for moderation, keyed by IP.
                $entry = new Entry();
                $data['ip'] = \Request::ip();
                $data['fuel_station'] = $validatedData['fuel_station'];
                $entry->fill($data);
                $entry->save();
                return response()->json(['success' => 1]);
            }
        } catch (\Exception $e) {
            return response()->json(['success' => 0]);
        }
    }

    /**
     * Manually approve a queued entry from the panel.
     *
     * Fix: catch \Exception (was the unresolvable local name Exception).
     */
    public function push(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:entries',
        ]);
        try {
            $entry = Entry::findOrFail($validatedData['id']);
            $entry->push();
            return redirect('/panel/entries/list')->with('status', 'Entrada Validada Manualmente!');
        } catch (\Exception $e) {
            return redirect('/panel/entries/list')->with('status', 'Erro ao validar entrada!');
        }
    }

    /**
     * Return unused entries from the last hour, grouped by station and
     * reported flags, with a distinct-IP count per group.
     */
    public function fetch_pending()
    {
        $entries_final = [];
        $entries = DB::table('entries')->select(DB::raw('MAX(id) as id,COUNT(DISTINCT(ip)) as total, fuel_station, has_gasoline, has_diesel, has_lpg'))->where('used', '=', 0)->where('created_at', '>=', \Carbon\Carbon::now()->subHour())->groupBy('fuel_station', 'has_gasoline', 'has_diesel', 'has_lpg')->get();
        foreach ($entries as $entry) {
            $fuel_station_string = FuelStation::find($entry->fuel_station)->string;
            $entries_final[] = [
                'id' => $entry->id,
                'fuel_station' => $fuel_station_string,
                'has_gasoline' => $entry->has_gasoline,
                'has_diesel' => $entry->has_diesel,
                'has_lpg' => $entry->has_lpg,
                'count' => $entry->total,
            ];
        }
        return response()->json(['data' => $entries_final]);
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App;
use Illuminate\Database\Eloquent\Model;
class Option extends Model
{
    // Key/value application settings, keyed by a string 'name' column.
    protected $table = 'options';
    // 'name' and 'description' are not mass assignable; other columns
    // (e.g. 'value') can be filled/updated in bulk.
    protected $guarded = ['name','description'];
    // Non-incrementing string primary key.
    protected $primaryKey = 'name';
    public $incrementing = false;
    protected $keyType = 'string';
}
<file_sep><?php
declare(strict_types=1);
namespace App\Providers;
use App\Entry;
use App\Observers\EntryObserver;
use Illuminate\Support\ServiceProvider;
class AppServiceProvider extends ServiceProvider
{
    /**
     * Register any application services.
     *
     * @return void
     */
    public function register()
    {
        //
    }

    /**
     * Bootstrap any application services.
     *
     * Hooks EntryObserver onto the Entry model so its lifecycle events are
     * observed application-wide.
     *
     * @return void
     */
    public function boot()
    {
        //
        Entry::observe(EntryObserver::class);
    }
}
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class AddENSEInfoToFuelStations extends Migration
{
    /**
     * Run the migrations.
     *
     * Adds three ENSE-sourced string columns to fuel_stations, all
     * defaulting to '0': the external ENSE id and the last reported
     * gasoline/diesel volumes.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->string('ense_id', 100)->default('0');
            $table->string('ense_gasoline', 100)->default('0');
            $table->string('ense_diesel', 100)->default('0');
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->dropColumn('ense_id');
            $table->dropColumn('ense_gasoline');
            $table->dropColumn('ense_diesel');
        });
    }
}
<file_sep>(function() {
// your page initialization code here
// the DOM will be available here
// Render the global stats dashboard: a pie chart of station availability
// (all / partial / none) plus the three per-fuel bar charts.
// dataSourceUri: URL of the pre-computed stats JSON.
window.renderChartsGlobalStats = function (dataSourceUri) {
    // NOTE(review): Get() is a helper defined elsewhere — presumably a
    // synchronous HTTP GET returning the response body. TODO confirm.
    let data = JSON.parse(Get(dataSourceUri));
    google.charts.load("current", {packages: ["corechart"]});
    google.charts.setOnLoadCallback(drawChartsStations);
    function drawChartsStations() {
        // Pie chart: how many stations have all / some / none of their fuels.
        let dataTable1 = new google.visualization.DataTable();
        dataTable1.addColumn('string', 'Combustivel');
        dataTable1.addColumn('number', 'Postos');
        dataTable1.addRows([
            ['Todos', data.stations_all],
            ['Parte', data.stations_partial],
            ['Nenhum', data.stations_none]
        ]);
        let options = {
            pieHole: 0.2,
            chartArea: {
                top: 50,
                height: "300px"
            },
            height: 300,
            legend: {
                position: "top",
                alignment: "center",
            },
            pieSliceText: 'value-and-percentage',
            tooltip: {
                ignoreBounds: true
            },
            sliceVisibilityThreshold: 0
        };
        // NOTE(review): Object.assign mutates `options` in place; harmless
        // here because `options` is not reused afterwards.
        let optionsChart1 = Object.assign(options, {
            colors: ['#8BC34A', '#f6bd00', '#f62317'],
            backgroundColor: { fill:'transparent' }
        });
        let chart1 = new google.visualization.PieChart(document.getElementById('stations-chart-area'));
        chart1.draw(dataTable1, optionsChart1);
        document.getElementById('stations_total_number').innerHTML = data["stations_total"];
    }
    // Per-fuel bar charts share the same data payload.
    drawChartsFuelTypes(
        data,
        'gasoline-chart-area',
        'diesel-chart-area',
        'lpg-chart-area'
    );
};
// Render the per-brand stats panel: total station count plus the three
// per-fuel bar charts for the given brand's data payload.
window.renderChartsBrand = function (data) {
    // Nothing to draw without a payload.
    if (!data) {
        return;
    }
    const totalEl = document.getElementById('brand_stations_total_number');
    totalEl.textContent = data.stations_total;
    drawChartsFuelTypes(
        data,
        'gasoline-chart-area-brand',
        'diesel-chart-area-brand',
        'lpg-chart-area-brand'
    );
}
// Draw the three stacked "sold out vs available" bar charts (gasoline,
// diesel, LPG) into the given container element ids.
// Fixes: the LPG style color was missing its '#' prefix ('3D8CB1'), and
// Object.assign(barOptions, …) mutated the shared options object for every
// chart — each chart now gets its own copy.
window.drawChartsFuelTypes = function (data,gasolineElId,dielseElId,lpgElId){
    google.charts.load("current", {packages: ["corechart"]});
    google.charts.setOnLoadCallback(drawChartForFuelTypes);
    function drawChartForFuelTypes() {
        {
            // Available = stations that sell the fuel minus those reported empty.
            let hasGasoline = data.stations_sell_gasoline - data.stations_no_gasoline;
            let hasDiesel = data.stations_sell_diesel - data.stations_no_diesel;
            let hasLpg = data.stations_sell_lpg - data.stations_no_lpg;
            // Shared layout for all three bar charts.
            let barOptions = {
                legend : {position: "top", alignment: "left"},
                tooltip: { ignoreBounds:true},
                bar: { groupWidth: '50%' },
                isStacked: true,
                top:0,
                height: 130,
                hAxis: {textPosition: 'none'},
                backgroundColor: '#F8FAFC'
            };
            //
            // GASOLINE
            //
            let dataTable2 = google.visualization.arrayToDataTable([
                ['Combustivel','Esgotado',{ role: 'annotation'},'Vende',{ role: 'annotation'},{role: 'style'}],
                ['Gasolina', data.stations_no_gasoline,data.stations_no_gasoline,hasGasoline,hasGasoline,'#AAAE43']
            ]);
            // Copy barOptions instead of mutating it, so each chart's options
            // are independent.
            let optionsChart2 = Object.assign({}, barOptions, {colors: ['#f62317','#AAAE43']});
            let chart2 = new google.visualization.BarChart(document.getElementById(gasolineElId));
            chart2.draw(dataTable2, optionsChart2);
            //
            // DIESEL
            //
            let dataTable3Diesel = google.visualization.arrayToDataTable([
                ['Combustivel','Esgotado',{ role: 'annotation'},'Vende',{ role: 'annotation'},{role: 'style'}],
                ['Gasoleo', data.stations_no_diesel,data.stations_no_diesel,hasDiesel,hasDiesel,'#DB6E3E'],
            ]);
            let optionsChart3 = Object.assign({}, barOptions, {colors: ['#f62317','#DB6E3E']});
            let chart3lpg = new google.visualization.BarChart(document.getElementById(dielseElId));
            chart3lpg.draw(dataTable3Diesel, optionsChart3);
            //
            // LPG
            //
            let dataTable4 = google.visualization.arrayToDataTable([
                ['Combustivel', 'Esgotado', { role: 'annotation'}, 'Vende', { role: 'annotation'}, {role: 'style'},],
                // Fixed: '#3D8CB1' (the '#' was missing, producing an invalid
                // CSS color in the style role).
                ['GPL', data.stations_no_lpg, data.stations_no_lpg, hasLpg, hasLpg, '#3D8CB1']
            ]);
            let optionsChart4 = Object.assign({}, barOptions, {colors: ['#f62317','#3D8CB1']});
            let chart4lpg = new google.visualization.BarChart(document.getElementById(lpgElId));
            chart4lpg.draw(dataTable4, optionsChart4);
        }
    }
}
// Render the "submissions in the last 12 hours" line chart from the cached
// stats JSON at dataSourceUri.
window.renderEntriesLast12Hours = function (dataSourceUri) {
    // NOTE: Get() is a synchronous fetch helper defined elsewhere.
    const data = JSON.parse(Get(dataSourceUri));
    google.charts.load("current", {packages: ["corechart"]});
    google.charts.setOnLoadCallback(function () {
        // Header row followed by one [hour, count] row per record.
        const rows = [['Hora', 'Submissões'], ...data.records];
        const dataTable = google.visualization.arrayToDataTable(rows);
        const options = {
            title: `Total geral de ${data.total} submissões`,
            titlePosition: 'out',
            legend: { position: 'none' },
            height: 300,
            titleTextStyle: {
                fontSize: 14
            },
            backgroundColor: '#F8FAFC'
        };
        const chart = new google.visualization.LineChart(document.getElementById('entries-last12-hours'));
        chart.draw(dataTable, options);
    });
};
})();
<file_sep><?php
declare(strict_types=1);
// resources/lang/pt/messages.php
return [
    'Map' => 'Mapa',
    // Fixed: missing accent and a stray trailing space ('Estatisticas ').
    'Stats' => 'Estatísticas',
];
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use Illuminate\Http\Request;
class ErrorController extends Controller
{
    /**
     * Render the "report a missing station" form.
     */
    public function new()
    {
        return response()->view('error/new');
    }

    /**
     * Render the "report wrong station data" form.
     */
    public function edit()
    {
        return response()->view('error/edit');
    }

    /**
     * Render the post-submission confirmation page.
     */
    public function submitted()
    {
        return response()->view('error/submitted');
    }

    /**
     * Validate an error report, verify its reCAPTCHA token, and forward the
     * report to the configured Google Spreadsheet webhook.
     *
     * Fix: `catch (Exception $e)` resolved to the non-existent class
     * App\Http\Controllers\Exception inside this namespace, so the catch
     * never matched and exceptions escaped; it now catches \Exception.
     */
    public function push(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'nullable|exists:fuel_stations,id',
            'brand' => 'required|string',
            'long' => 'required',
            'lat' => 'required',
            'sell_gasoline' => 'required',
            'sell_diesel' => 'required',
            'sell_lpg' => 'required',
            'captcha' => 'required|string',
            'vostie' => 'nullable|string',
            'email' => 'required|email',
        ]);
        try {
            // Verify the reCAPTCHA v3 token server-side.
            $url = 'https://www.google.com/recaptcha/api/siteverify';
            $data = [
                'secret' => env('GOOGLE_RECAPTCHA_V3_SECRET'),
                'response' => $validatedData['captcha'],
            ];
            $options = [
                'http' => [
                    'header' => "Content-type: application/x-www-form-urlencoded\r\n",
                    'method' => 'POST',
                    'content' => \http_build_query($data),
                ],
            ];
            $context = \stream_context_create($options);
            $resultJSON = \file_get_contents($url, false, $context);
            $result = \json_decode($resultJSON);
            if ($result->success != true) {
                return redirect('/')->with('status', 'Erro ao enviar informação!');
            }
            // Default optional fields before building the webhook URL.
            if (! $validatedData['id']) {
                $validatedData['id'] = 0;
            }
            if (! $validatedData['vostie']) {
                $validatedData['vostie'] = 'No';
            }
            $url = env('ERRORS_SPREADSHEET_LINK').'?id='.$validatedData['id'].'&lat='.$validatedData['lat'].'&long='.$validatedData['long'].'&brand='.\urlencode($validatedData['brand']).'&gasoline='.$validatedData['sell_gasoline'].'&diesel='.$validatedData['sell_diesel'].'&lpg='.$validatedData['sell_lpg'].'&vostie='.\urlencode($validatedData['vostie']).'&email='.\urlencode($validatedData['email']);
            // Fire-and-forget GET to the spreadsheet webhook.
            \file_get_contents($url);
            return redirect('/error/submitted')->with('status', 'Informação Enviada!');
        } catch (\Exception $e) {
            return redirect('/error/submitted')->with('status', 'Erro ao enviar informação!');
        }
    }
}
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class AddManagementBrand extends Migration
{
    /**
     * Run the migrations.
     *
     * Adds the 'brand_management' column (defaults to '') used when a brand
     * manages a station it does not own.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->string('brand_management', 100)->default('');
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->dropColumn('brand_management');
        });
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\Option;
use Illuminate\Http\Request;
class OptionsController extends Controller
{
    /**
     * Render the panel page listing application options.
     */
    public function list()
    {
        return response()->view('options/list');
    }

    /**
     * Update one option's value by its string primary key 'name'.
     *
     * Fix: `catch (Exception $e)` resolved to the non-existent class
     * App\Http\Controllers\Exception inside this namespace, so the catch
     * never matched and exceptions escaped; it now catches \Exception.
     */
    public function update(Request $request)
    {
        $validatedData = $request->validate([
            'name' => 'required|exists:options',
            'value' => 'required',
        ]);
        try {
            $option = Option::findOrFail($validatedData['name']);
            unset($validatedData['name']);
            // Keep updated_at untouched for option rows.
            $option->timestamps = false;
            $option->update($validatedData);
            return redirect('panel/options/list')->with('status', 'Opção Atualizada!');
        } catch (\Exception $e) {
            return redirect('panel/options/list')->with('status', 'Erro ao atualizar opção!');
        }
    }

    /**
     * Return every option row as JSON for the panel's data table.
     */
    public function fetch_all()
    {
        $options = Option::all();
        return response()->json(['data' => $options]);
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Http\Controllers;
use App\FuelStation;
use App\Option;
use Illuminate\Http\Request;
class FuelStationsController extends Controller
{
    //
    /**
     * Render the panel's station list, unless the 'stress_lockdown' option
     * disables it.
     */
    public function list()
    {
        if (Option::find('stress_lockdown')->value == 0) {
            return response()->view('stations/list');
        } else {
            return response('Feature Disabled due to Stress Lockdown (Disable at Options)', 200)->header('Content-Type', 'text/plain');
        }
    }

    /**
     * Update a station's sold-fuel flags and REPA status from the panel.
     *
     * Fix: `catch (Exception $e)` resolved to the non-existent class
     * App\Http\Controllers\Exception inside this namespace, so the catch
     * never matched and exceptions escaped; it now catches \Exception.
     */
    public function update(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:fuel_stations',
            'sells_gasoline' => 'required',
            'sells_diesel' => 'required',
            'sells_lpg' => 'required',
            'repa' => 'required',
        ]);
        try {
            $fuel_station = FuelStation::findOrFail($validatedData['id']);
            unset($validatedData['id']);
            $fuel_station->update($validatedData);
            // Rebuild the cached station data consumed by the site.
            $cacheController = new CacheController();
            $cacheController->updateStations();
            return redirect('panel/stations/list')->with('status', 'Estação Atualizada!');
        } catch (\Exception $e) {
            return redirect('panel/stations/list')->with('status', 'Erro ao atualizar estação!');
        }
    }

    /**
     * Update a station's availability flags from the panel (AJAX).
     *
     * Fix: catch \Exception (was the unresolvable local name Exception).
     */
    public function updateAvailable(Request $request)
    {
        $validatedData = $request->validate([
            'id' => 'required|exists:fuel_stations',
            'has_gasoline' => 'boolean',
            'has_diesel' => 'boolean',
            'has_lpg' => 'boolean',
        ]);
        try {
            $fuel_station = FuelStation::findOrFail($validatedData['id']);
            unset($validatedData['id']);
            $fuel_station->update($validatedData);
            $cacheController = new CacheController();
            $cacheController->updateStations();
            // NOTE(review): 'sucess' is misspelled but is part of the JSON
            // contract consumed by the frontend — renaming it would break
            // callers, so it is kept as-is.
            return response()->json(['sucess' => true]);
        } catch (\Exception $e) {
            return response()->json(['sucess' => false]);
        }
    }

    /**
     * Return every station as JSON for the panel's data table.
     */
    public function fetch_all()
    {
        $stations_final = [];
        $stations = FuelStation::all();
        foreach ($stations as $station) {
            $stations_final[] = [
                'id' => $station->id,
                'name' => $station->name,
                'brand' => $station->brand,
                'sell_gasoline' => $station->sell_gasoline,
                'sell_diesel' => $station->sell_diesel,
                'sell_lpg' => $station->sell_lpg,
                'repa' => $station->repa,
                'lat' => $station->lat,
                'long' => $station->long,
                'has_gasoline' => $station->has_gasoline,
                'has_diesel' => $station->has_diesel,
                'has_lpg' => $station->has_lpg,
            ];
        }
        return response()->json(['data' => $stations_final]);
    }
}
<file_sep><?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class RemoveUnusedDataFromFuelStations extends Migration
{
    /**
     * Run the migrations.
     *
     * Drops columns the app no longer uses, converts 'repa' from boolean to
     * a string(100), and adds the 'source_id' provenance column.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->dropColumn('usage');
            $table->dropColumn('type');
            $table->dropColumn('district');
            $table->dropColumn('county');
            $table->dropColumn('address');
            $table->dropColumn('id_station');
            $table->string('repa', 100)->change();
            $table->string('source_id', 100);
        });
    }

    /**
     * Reverse the migrations.
     *
     * NOTE: restores the dropped columns' types but not their data.
     *
     * @return void
     */
    public function down()
    {
        Schema::table('fuel_stations', function (Blueprint $table) {
            $table->string('usage');
            $table->string('type');
            $table->string('district');
            $table->string('county');
            $table->string('address');
            $table->unsignedBigInteger('id_station');
            $table->boolean('repa')->change();
            $table->dropColumn('source_id');
        });
    }
}
<file_sep><?php
declare(strict_types=1);
return [
    /*
    |--------------------------------------------------------------------------
    | Authentication Language Lines
    |--------------------------------------------------------------------------
    |
    | The following language lines are used during authentication for various
    | messages that we need to display to the user. You are free to modify
    | these language lines according to your application's requirements.
    |
    */
    // Fixed typos: "possivel" -> "possível", "credênciais" -> "credenciais",
    // "inicio" -> "início".
    'failed' => 'Não foi possível validar as tuas credenciais.',
    'throttle' => 'Efectuaste demasiadas tentativas de início de sessão. Por favor tenta de novo em :seconds segundos.',
];
<file_sep><?php
declare(strict_types=1);
namespace App;
use Illuminate\Database\Eloquent\Model;
class FuelStation extends Model
{
    protected $table = 'fuel_stations';
    // Everything except the primary key is mass assignable.
    protected $guarded = ['id'];

    /**
     * Human-readable label used in the moderation panel,
     * e.g. "Name (Brand) ID #42".
     */
    public function getStringAttribute()
    {
        return "{$this->name} ({$this->brand}) ID #{$this->id}";
    }

    /**
     * Stations where every fuel they sell is reported sold out.
     * (A fuel counts as "empty" when it is not sold at all, or is sold but
     * unavailable.)
     */
    public function scopeEmpty($query)
    {
        return $query->whereRaw('(((sell_gasoline = false) || (sell_gasoline = true && has_gasoline = false)) AND ((sell_diesel = false) || (sell_diesel = true && has_diesel = false)) AND((sell_lpg = false) || (sell_lpg = true && has_lpg = false)))');
    }

    /**
     * Stations that are neither fully stocked nor fully empty.
     */
    public function scopePartial($query)
    {
        return $query->whereRaw('(!(((sell_gasoline = false) || (sell_gasoline = true && has_gasoline = true)) AND ((sell_diesel = false) || (sell_diesel = true && has_diesel = true)) AND ((sell_lpg = false) || (sell_lpg = true && has_lpg = true))) AND !(((sell_gasoline = false) || (sell_gasoline = true && has_gasoline = false)) AND ((sell_diesel = false) || (sell_diesel = true && has_diesel = false)) AND((sell_lpg = false) || (sell_lpg = true && has_lpg = false))))');
    }

    /**
     * Stations where every fuel they sell is available.
     */
    public function scopeWithAll($query)
    {
        return $query->whereRaw('(((sell_gasoline = false) || (sell_gasoline = true && has_gasoline = true)) AND((sell_diesel = false) || (sell_diesel = true && has_diesel = true)) AND ((sell_lpg = false) || (sell_lpg = true && has_lpg = true)))');
    }

    // NOTE(review): the next three scopes use an un-grouped orWhere — when
    // combined with other where clauses the OR is not parenthesised, which
    // can widen the result set. Confirm call sites before relying on them
    // in compound queries.

    /** Stations with gasoline available (or that don't sell gasoline). */
    public function scopeGasoline($query)
    {
        return $query->where([['sell_gasoline', '=', true], ['has_gasoline','=',true]])->orWhere([['sell_gasoline', '=', false]]);
    }

    /** Stations with diesel available (or that don't sell diesel). */
    public function scopeDiesel($query)
    {
        return $query->where([['sell_diesel', '=', true], ['has_diesel','=',true]])->orWhere([['sell_diesel', '=', false]]);
    }

    /** Stations with LPG available (or that don't sell LPG). */
    public function scopeLPG($query)
    {
        return $query->where([['sell_lpg', '=', true], ['has_lpg','=',true]])->orWhere([['sell_lpg', '=', false]]);
    }

    /** Stations that sell gasoline but have none. */
    public function scopeNoGasoline($query)
    {
        return $query->where([['sell_gasoline', '=', true], ['has_gasoline','=',false]]);
    }

    /** Stations that sell diesel but have none. */
    public function scopeNoDiesel($query)
    {
        return $query->where([['sell_diesel', '=', true], ['has_diesel','=',false]]);
    }

    /** Stations that sell LPG but have none. */
    public function scopeNoLPG($query)
    {
        return $query->where([['sell_lpg', '=', true], ['has_lpg','=',false]]);
    }

    /**
     * Distinct district names. NOTE: executes the query and returns a
     * Collection of strings, not a builder.
     */
    public function scopeDistricts($query)
    {
        return $query->selectRaw('district')->groupBy('district')->get()->pluck('district');
    }

    /**
     * Distinct county names within a district. Executes the query.
     */
    public function scopeCounties($query, $district)
    {
        return $query->selectRaw('county')->where('district', '=', $district)->groupBy('county')->get()->pluck('county');
    }

    /** Stations that sell gasoline. */
    public function scopeSellGasoline($query)
    {
        return $query->where([['sell_gasoline', '=', true]]);
    }

    /** Stations that sell diesel. */
    public function scopeSellDiesel($query)
    {
        return $query->where([['sell_diesel', '=', true],]);
    }

    /** Stations that sell LPG. */
    public function scopeSellLPG($query)
    {
        return $query->where([['sell_lpg', '=', true]]);
    }

    /**
     * Distinct brand names. Executes the query.
     */
    public function scopeBrands($query)
    {
        return $query->selectRaw('brand')->groupBy('brand')->get()->pluck('brand');
    }
}
<file_sep><?php
namespace App\Console\Commands;
use App\FuelStation;
use Illuminate\Console\Command;
class UpdateRepasFromUrl extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'fuelstations:update-repas';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Update repas from google spreadsheet';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Split one tab-separated line into fields.
     *
     * Fix: the original hard-coded "\t" and ignored its own parameters;
     * it now honours them (defaults keep the previous behaviour).
     */
    private function str_gettsv($input, $delimiter = "\t", $enclosure = '"', $escape = '\\')
    {
        return \str_getcsv($input, $delimiter, $enclosure, $escape);
    }

    /**
     * Download the TSV export and copy each row's REPA value onto the fuel
     * station matched by name (column 5).
     *
     * Fix: bail out cleanly when the download fails — previously a false
     * return value from file_get_contents was passed to explode().
     *
     * @return mixed
     */
    public function handle()
    {
        $tsv = \file_get_contents(env('FUELSTATIONS_SOURCE1'));
        if ($tsv === false) {
            // Source unreachable — leave existing data untouched.
            return;
        }
        $fetched_object = \array_map([$this, 'str_gettsv'], \explode("\n", $tsv));
        // Drop the header row.
        unset($fetched_object[0]);
        foreach ($fetched_object as $obj) {
            // Column 0 = REPA value, column 5 = station name.
            if (! empty($obj[0])) {
                $s = FuelStation::where('name', $obj[5])->get();
                if (isset($s[0])) {
                    $s = $s[0];
                    $data = [
                        'repa' => $obj[0],
                    ];
                    $s->fill($data);
                    $s->save();
                }
            }
        }
    }
}
// On page load, render the "entries in the last 12 hours" chart from the
// pre-generated JSON snapshot.
window.onload = function () {
    var statsUrl = "/storage/data/stats_entries_last12.json";
    renderEntriesLast12Hours(statsUrl);
};
<file_sep><?php
declare(strict_types=1);
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
// -------------------------------------------------------------------------
// Public pages (map, statistics, static info pages).
// -------------------------------------------------------------------------
Route::get('/', function () {
    return view('map');
})->name('map');
Route::get('/stats', function () {
    return view('stats');
})->name('stats');
Route::get('/arcgis', function () {
    return view('arcgis');
})->name('arcgis');
Route::get('/about', function () {
    return view('about');
})->name('about');
Route::get('/media', function () {
    return view('media');
})->name('media');
Route::get('/help', function () {
    return view('help');
})->name('help');
// -------------------------------------------------------------------------
// Authentication: registration disabled, e-mail verification enabled.
// Login/password endpoints are re-declared under the /panel prefix so the
// generated route names point at the panel URLs.
// -------------------------------------------------------------------------
Auth::routes(['register' => false, 'verify' => true]);
Route::get('panel/login', 'Auth\LoginController@showLoginForm')->name('login');
Route::post('panel/login', 'Auth\LoginController@login');
Route::post('panel/logout', 'Auth\LoginController@logout')->name('logout');
Route::get('panel/password/reset', 'Auth\ForgotPasswordController@showLinkRequestForm')->name('password.request');
Route::post('panel/password/email', 'Auth\ForgotPasswordController@sendResetLinkEmail')->name('password.email');
Route::get('panel/password/reset/{token}', 'Auth\ResetPasswordController@showResetForm')->name('password.reset');
Route::post('panel/password/reset', 'Auth\ResetPasswordController@reset');
// -------------------------------------------------------------------------
// Admin panel. Note: /panel/home and entries.add are NOT behind the auth
// middleware; every other subgroup is.
// -------------------------------------------------------------------------
Route::prefix('panel')->group(function () {
    Route::get('/home', 'HomeController@index')->name('home');
    // User management (auth required).
    Route::prefix('users')->name('users.')->middleware('auth')->group(function () {
        Route::get('add', 'UsersController@add')->name('add');
        Route::post('create', 'UsersController@create')->name('create');
        Route::get('list', 'UsersController@list')->name('list');
        Route::prefix('fetch')->name('fetch.')->group(function () {
            Route::get('verified', 'UsersController@fetch_verified')->name('verified');
            Route::get('not_verified', 'UsersController@fetch_not_verified')->name('not_verified');
        });
        Route::get('password', 'UsersController@password')->name('password');
        Route::post('verify', 'UsersController@verify')->name('verify');
        Route::post('delete', 'UsersController@delete')->name('delete');
        Route::post('updatePassword', 'UsersController@updatePassword')->name('updatePassword');
    });
    // Crowd-sourced entries: listing/pushing requires auth, adding does not.
    Route::prefix('entries')->name('entries.')->group(function () {
        Route::get('list', 'EntriesController@list')->middleware('auth')->name('list');
        Route::prefix('fetch')->name('fetch.')->middleware('auth')->group(function () {
            Route::get('pending', 'EntriesController@fetch_pending')->name('pending');
        });
        Route::post('push', 'EntriesController@push')->middleware('auth')->name('push');
        Route::post('add', 'EntriesController@add')->name('add');
    });
    // Fuel station management (auth required).
    // NOTE(review): the URI is 'pending' but the action/name are
    // fetch_all/'all' — confirm which was intended.
    Route::prefix('stations')->name('stations.')->middleware('auth')->group(function () {
        Route::get('list', 'FuelStationsController@list')->name('list');
        Route::prefix('fetch')->name('fetch.')->group(function () {
            Route::get('pending', 'FuelStationsController@fetch_all')->name('all');
        });
        Route::post('update', 'FuelStationsController@update')->name('update');
        Route::post('update-available', 'FuelStationsController@updateAvailable')->name('update_available');
    });
    // Site options (auth required).
    Route::prefix('options')->name('options.')->middleware('auth')->group(function () {
        Route::get('list', 'OptionsController@list')->name('list');
        Route::prefix('fetch')->name('fetch.')->group(function () {
            Route::get('all', 'OptionsController@fetch_all')->name('all');
        });
        Route::post('update', 'OptionsController@update')->name('update');
    });
    // External API credentials (auth required).
    Route::prefix('externalauth')->name('externalauth.')->middleware('auth')->group(function () {
        Route::get('add', 'ExternalAuthController@add')->name('add');
        Route::post('create', 'ExternalAuthController@create')->name('create');
        Route::get('list', 'ExternalAuthController@list')->name('list');
        Route::prefix('fetch')->name('fetch.')->group(function () {
            Route::get('all', 'ExternalAuthController@fetch_all')->name('all');
        });
        Route::post('delete', 'ExternalAuthController@delete')->name('delete');
    });
});
// -------------------------------------------------------------------------
// Public JSON/CSV API (v1).
// -------------------------------------------------------------------------
Route::prefix('api/v1')->name('api.')->group(function () {
    Route::get('/', 'APIController@home')->name('home');
    Route::post('fetch', 'APIController@fetch')->name('fetch');
    Route::post('push', 'APIController@push')->name('push');
    Route::post('info', 'APIController@info')->name('info');
    Route::post('add', 'APIController@add')->name('add');
    Route::post('change', 'APIController@change')->name('change');
    Route::get('fetchcsv', 'APIController@fetch_csv')->name('fetch_csv');
    Route::get('fetchapicsv', 'APIController@fetch_api_csv')->name('fetch_api_csv');
});
// Graph data endpoints.
Route::prefix('graphs')->name('graphs.')->group(function () {
    Route::get('/stats', 'GraphsController@stats')->name('stats');
    Route::get('/brands', 'GraphsController@brands')->name('brands');
    Route::get('/entries', 'GraphsController@entries')->name('entries');
});
// User-submitted error-report flow.
Route::prefix('error')->name('error.')->group(function () {
    Route::get('/', 'ErrorController@new')->name('new');
    Route::get('edit', 'ErrorController@edit')->name('edit');
    Route::get('submitted', 'ErrorController@submitted')->name('submitted');
    Route::post('push', 'ErrorController@push')->name('push');
});
<file_sep><?php
declare(strict_types=1);
namespace App\Observers;
use App\Entry;
use App\Option;
class EntryObserver
{
    /**
     * Handle the entry "created" event.
     *
     * Once enough matching reports (same station, same availability flags,
     * filtered by IP) have been collected within the last hour, the entry
     * is pushed automatically.
     *
     * @param  \App\Entry  $entry
     * @return void
     */
    public function created(Entry $entry)
    {
        // Threshold of corroborating entries before auto-pushing.
        // Fix: the old code called \is_nan() on \intval()'s result, which is
        // always false (intval returns an int, never NaN), so the fallback
        // never fired — and Option::find() returning null fataled. Fall back
        // to 10 when the option is missing or not a positive integer.
        $option = Option::find('num_entries_required');
        $required_count = $option !== null ? \intval($option->value) : 0;
        if ($required_count <= 0) {
            $required_count = 10;
        }
        if (Entry::lastHour()->related($entry->fuel_station, $entry->has_gasoline, $entry->has_diesel, $entry->has_lpg, $entry->ip)->count() >= $required_count) {
            $entry->push();
        }
    }

    /**
     * Handle the entry "updated" event.
     *
     * @param  \App\Entry  $entry
     * @return void
     */
    public function updated(Entry $entry)
    {
        //
    }

    /**
     * Handle the entry "deleted" event.
     *
     * @param  \App\Entry  $entry
     * @return void
     */
    public function deleted(Entry $entry)
    {
        //
    }

    /**
     * Handle the entry "restored" event.
     *
     * @param  \App\Entry  $entry
     * @return void
     */
    public function restored(Entry $entry)
    {
        //
    }

    /**
     * Handle the entry "force deleted" event.
     *
     * @param  \App\Entry  $entry
     * @return void
     */
    public function forceDeleted(Entry $entry)
    {
        //
    }
}
|
dfc348abf6208c686a0a9c91b0609209566fdb7d
|
[
"Markdown",
"JavaScript",
"PHP",
"Shell"
] | 37 |
PHP
|
vostpt/greve-transportes-perigosos
|
d647c954bc7f17705427c6002c6331635ef6d622
|
00e2e231e3558057608ebfffd834132638ae2014
|
refs/heads/main
|
<file_sep># Bike-ride-sharing-<file_sep>import csv
import json
import numpy as np
import os
import pandas as pd
import requests
from requests.adapters import HTTPAdapter
import time
# Copyright 2020 by <NAME>.
# All rights reserved.
# This file is part of the map repository found on https://github.com/chiclez/map
def GetBestRoute (data, hours, region, fuel):
    """Query a local OSRM server for the best per-hour rebalancing trip and
    write ``best_route_region{region}.csv`` with distance, fuel cost,
    duration and visit order per hour, plus a per-region summary footer.

    Args:
        data: DataFrame of unbalanced stations with columns t, station_id,
              station_name and OSRM-ready coordinate columns.
        hours: DataFrame with one row per hour slot "t" to service.
        region: region number, used only in the output file name.
        fuel: fuel price (GBP per litre) used for the cost estimate.
    """
    # Get the hours schedule for the region
    hourSchedule = hours["t"].tolist()
    firstHour = hourSchedule[0]
    lastHour = hourSchedule[-1] + 1
    workingHours = len(hourSchedule)
    hourlySalary = 9.2
    pay = round(hourlySalary*workingHours,2)
    fileName = f"best_route_region{region}.csv"
    totalDistance = 0
    totalFuelCost = 0
    # Call OSRM per hour in the schedule and write the output file containing the OSRM results
    for i in range(firstHour, lastHour):
        tspData = data.loc[data["t"] == i]
        stations = tspData["station_id"].tolist()
        stationNames = tspData["station_name"].tolist()
        # Flatten the stop coordinates to one semicolon-separated row, which
        # is round-tripped through a temp file to build the URL segment.
        coordinates = tspData.drop(columns=["t", "station_id", "station_name"])
        coordinates = pd.DataFrame(coordinates.values.reshape(1,-1))
        coordinates.to_csv("temp_stops.csv", index = False, header = False, sep = ";")
        with open("temp_stops.csv", "r+") as f:
            tempStops = f.read()
        tempStops = tempStops.rstrip()
        # One-way trip: start at the fixed depot, end at the last stop.
        tspUrl = "http://127.0.0.1:5000/trip/v1/driving/"
        depot = "-3.157453,55.973447"
        pickupUrl = f"{tspUrl}{depot};{tempStops}?roundtrip=false&source=first&destination=last"
        os.remove("temp_stops.csv")
        s = requests.Session()
        s.mount(pickupUrl, HTTPAdapter(max_retries=5))
        r = s.get(pickupUrl, timeout=5)
        res = r.json()
        #with open('distance.json', 'w') as json_file: json.dump(res, json_file, indent = 4)
        vanFuelConsumption = 7.6/100 # L/100 km
        distance = round(res['trips'][0]['distance']/1000,1)  # -> km (see header below)
        duration = int(res['trips'][0]['duration']/60)  # -> minutes
        fuelCost = round(distance*(vanFuelConsumption)*(fuel),1) #in sterling per hour (or trip)
        totalStations = len(stations)
        totalDistance += distance
        totalFuelCost += fuelCost
        header = "time\tkm\t\t£\t\tmin\t\tstations\tstation name"
        # First hour creates the file with the header; later hours append.
        if(i == firstHour):
            with open(fileName, 'w') as file:
                file.write(header)
                file.write("\n")
                file.write("="*(len(header)+20))
                file.write(f"\n{i}\t\t{distance}\t{fuelCost}\t\t{duration}\t\t{totalStations}\t\t\t")
        else:
            with open(fileName, 'a') as file:
                file.seek(0, os.SEEK_END)
                file.write(f"\n{i}\t\t{distance}\t{fuelCost}\t\t{duration}\t\t{totalStations}\t\t\t")
        # Append the visit order OSRM chose; waypoint 0 is the depot, hence
        # the -1 offset into the stations list.
        for j in range(0, len(stations)):
            leg = res['waypoints'][j]['waypoint_index'] - 1
            stop = stations[leg]
            stopName = stationNames[leg]
            with open(fileName, 'a') as file:
                if(j == len(stations)-1):
                    file.write(f"{stopName}\n")
                else:
                    file.write(f"{stopName}|")
    totalFuelCost = round(totalFuelCost, 1)
    totalDistance = round(totalDistance, 1)
    # Per-region summary footer.
    with open(fileName, 'a') as f:
        header = f"\nRegion {region} summary\n"
        f.write(header)
        f.write("="*len(header))
        f.write(f"\n\nTotal fuel cost: £{totalFuelCost}\n")
        f.write(f"Total distance: {totalDistance} km\n")
        f.write(f"Total working hours: {workingHours}h\n")
        f.write(f"Total pay: £{pay}\n")
        f.write("\nHappy ride!")
    return
def Dat_NetAdd(processedData):
    """Pivot the trips into an hourly net-flow table per station.

    net_add = arrivals - departures for each (station, hour). The result is
    a pivot table indexed by station_id with one column per hour, missing
    combinations filled with 0.
    """
    # Generate the appropriate dataframe form
    hourlyStart = pd.DataFrame({'startFreq': processedData.groupby(["start_station_id",
        "start_station_name", "hour_started_at"]).size()}).reset_index()
    hourlyEnd = pd.DataFrame({'endFreq': processedData.groupby(["end_station_id",
        "end_station_name", "hour_ended_at"]).size()}).reset_index()
    # Align the column names so both frames merge on (station_id, hour).
    hourlyStart.rename(columns = {"start_station_id":"station_id", "hour_started_at":"hour"}, inplace = True)
    hourlyEnd.rename(columns = {"end_station_id":"station_id", "hour_ended_at":"hour"}, inplace = True)
    # Outer merge keeps (station, hour) pairs that only appear on one side.
    preDat = pd.merge(hourlyStart,hourlyEnd, how = 'outer', left_on=['station_id','hour'], right_on = ['station_id','hour'])
    preDat = preDat.fillna(0)
    preDat["net_add"] = preDat["endFreq"] - preDat["startFreq"]
    datFile = preDat[["station_id", "hour", "net_add"]]
    datFile = datFile.sort_values(by=["station_id", "hour"], ascending = True)
    # One row per station, one column per hour of the day.
    net_add = datFile.pivot_table(index=['station_id'], columns=['hour'],
        values='net_add', aggfunc='first', fill_value=0)
    return net_add
def Dat_BikeInit(net_add):
    """Build the initial bike-count table for the stations in ``net_add``.

    Every station starts hour 0 with 5 bikes; the remaining 23 hourly slots
    are initialised to 0.

    Args:
        net_add: DataFrame whose row count gives the number of stations.

    Returns:
        DataFrame of shape (n_stations, 24) of floats.
    """
    n_stations = net_add.shape[0]
    init = np.zeros((n_stations, 24))
    init[:, 0] = 5  # vectorised instead of a per-row loop
    return pd.DataFrame(data=init)
def Dat_CityDivision(processedData):
    """Aggregate per-station trip counts and net-flow direction flags that
    the city-division model consumes.

    Returns:
        (latitude, longitude, net_flow_in, net_flow_out, station_id) Series,
        all sorted by (startFreq, endFreq).
    """
    # Generate the appropriate dataframe form
    netStart = pd.DataFrame({'startFreq': processedData.groupby(["start_station_id",
        "start_station_name", "start_station_latitude",
        "start_station_longitude"]).size()}).reset_index()
    netEnd = pd.DataFrame({'endFreq': processedData.groupby(["end_station_id",
        "end_station_name", "end_station_latitude",
        "end_station_longitude"]).size()}).reset_index()
    # Harmonise the column names so both frames merge on station + position.
    netStart.rename(columns =
        {"start_station_id":"station_id",
         "start_station_latitude": "Latitude",
         "start_station_longitude": "Longitude"},
        inplace = True)
    netEnd.rename(columns =
        {"end_station_id":"station_id",
         "end_station_latitude": "Latitude",
         "end_station_longitude": "Longitude"},
        inplace = True)
    preDat = pd.merge(netStart,netEnd, how = 'outer',
        left_on=['station_id', 'Latitude', 'Longitude'],
        right_on = ['station_id', 'Latitude', 'Longitude'])
    preDat = preDat.fillna(0)
    # 1 when more trips end than start at the station (net inflow), and the
    # symmetric flag for net outflow; 0 otherwise.
    preDat["net_flow_in"] = ((preDat["endFreq"] - preDat["startFreq"]) > 0).astype(int)
    preDat["net_flow_out"] = ((preDat["startFreq"] - preDat["endFreq"]) > 0).astype(int)
    datFile = preDat[["station_id", "Latitude", "Longitude", "startFreq",
        "endFreq", "net_flow_in", "net_flow_out"]]
    datFile = datFile.sort_values(by=["startFreq", "endFreq"], ascending = True)
    latitude = datFile["Latitude"]
    longitude = datFile["Longitude"]
    net_flow_in = datFile["net_flow_in"]
    net_flow_out = datFile["net_flow_out"]
    station_id = datFile["station_id"]
    return latitude, longitude, net_flow_in, net_flow_out, station_id
def Regions(divisionFile):
    """Split the city-division table into its three region frames.

    Reads ``divisionFile`` (CSV with at least 'station_id' and an integer
    'region' column) and returns one DataFrame per region, each with the
    'region' column dropped.

    Returns:
        (region1, region2, region3) tuple of pandas DataFrames.
    """
    allRegions = pd.read_csv(divisionFile, dtype={"region": int})
    allRegions = allRegions.sort_values(by=["region"])
    region1 = allRegions.loc[allRegions["region"] == 1].drop(columns=["region"])
    region2 = allRegions.loc[allRegions["region"] == 2].drop(columns=["region"])
    # Bug fix: the old code did `region2 = region3.drop(...)`, which replaced
    # region 2 with region 3's rows and returned region3 with the 'region'
    # column still attached.
    region3 = allRegions.loc[allRegions["region"] == 3].drop(columns=["region"])
    return region1, region2, region3
def OutputNet_Add(netAdd, bikeInit, stations, region):
    """Write ``net_add_region{region}.dat`` in the Mosel initialisation
    format: the net_add matrix, initial bike counts and station ids folded
    into labelled ``name: [ ... ]`` sections of one file.
    """
    curDir = os.getcwd()
    netAddFile = os.path.join(curDir, f"net_add_region{region}.dat")
    bikeInitFile = os.path.join(curDir, f"bike_init_region{region}.dat")
    stationFile = os.path.join(curDir, f"stations_region{region}.dat")
    # Dump the three tables as space-separated text first...
    netAdd.to_csv(netAddFile, index = False, header = False, sep = " ")
    bikeInit.to_csv(bikeInitFile, index = False, header = False, sep = " ")
    stations.to_csv(stationFile, index = False, header = False, sep = " ")
    # ...then fold the two helper files into the net_add file and delete them.
    f1 = open(bikeInitFile)
    f2 = open(stationFile)
    with open(netAddFile, "r+") as f:
        contents = f.read()
        ini_bikes = f1.read()
        station_id = f2.read()
        f.seek(0) # rewind
        f.write("net_add: [" + contents + "]\n")
        f.write("ini_bikes: [" + ini_bikes + "]\n")
        f.write("station_id: [" + station_id + "]")
    f1.close()
    f2.close()
    os.remove(bikeInitFile)
    os.remove(stationFile)
    print(f"Created {netAddFile}")
    return
def DataProcessing(filePath, weekNumber, tsp):
    """Load and enrich the raw trips CSV shared by every other routine.

    Args:
        filePath: path to the monthly trips CSV (columns 1 and 2 must be the
            started_at / ended_at timestamps).
        weekNumber: ISO week to keep (only used when tsp == 0).
        tsp: 0 -> return the trips of the requested week;
             anything else -> return per-pickup-station trip counts.

    Returns:
        pandas DataFrame (filtered trips, or grouped counts when tsp != 0).
    """
    rawData = pd.read_csv(filePath, parse_dates = [1,2],
        dtype={'start_station_longitude': float, 'start_station_latitude': float,
               'end_station_longitude': float, 'end_station_latitude': float})
    # Coordinates become 5-decimal strings so they can be ','-joined into the
    # "lon,lat" form OSRM expects (and "lat,lon" for everything else).
    rawData['start_station_longitude'] = (rawData['start_station_longitude'].round(5)).astype(str)
    rawData['start_station_latitude'] = (rawData['start_station_latitude'].round(5)).astype(str)
    rawData['end_station_longitude'] = (rawData['end_station_longitude'].round(5)).astype(str)
    rawData['end_station_latitude'] = (rawData['end_station_latitude'].round(5)).astype(str)
    # Parse once and reuse: the old code re-ran pd.to_datetime for every
    # derived column and also left two unused start_date/end_date locals.
    started = pd.to_datetime(rawData['started_at'])
    ended = pd.to_datetime(rawData['ended_at'])
    # Day of week, ISO week, day of month and hour for both trip ends.
    rawData['weekday_start_at'] = started.dt.dayofweek
    rawData['weekday_ended_at'] = ended.dt.dayofweek
    rawData['week_start_at'] = started.dt.isocalendar().week
    rawData['week_ended_at'] = ended.dt.isocalendar().week
    rawData['monthday_start_at'] = started.dt.day
    rawData['monthday_ended_at'] = ended.dt.day
    rawData['hour_started_at'] = started.dt.hour
    rawData['hour_ended_at'] = ended.dt.hour
    # Normal "lat,lon" coordinates, for the rest of the world.
    rawData['pickupCoordinates_normal'] = rawData[['start_station_latitude', 'start_station_longitude']].apply(lambda x: ','.join(x), axis=1)
    rawData['dropCoordinates_normal'] = rawData[['end_station_latitude', 'end_station_longitude']].apply(lambda x: ','.join(x), axis=1)
    # "lon,lat" order, as required by OSRM.
    rawData['pickupCoordinates'] = rawData[['start_station_longitude', 'start_station_latitude']].apply(lambda x: ','.join(x), axis=1)
    rawData['dropCoordinates'] = rawData[['end_station_longitude', 'end_station_latitude']].apply(lambda x: ','.join(x), axis=1)
    if(tsp == 0):
        # Get the user-selected week
        processedData = rawData.loc[rawData['week_start_at'] == int(weekNumber)]
    else:
        # Trip counts per pickup station.
        processedData = pd.DataFrame({'count':
            rawData.groupby(["pickupCoordinates","start_station_id", "start_station_name"]).size()}).reset_index()
    return processedData
def Tsp():
    '''
    This script will calculate the combinations for the Travelling salesman problem
    for Edinburgh's JustEat bike sharing service using a Docker OSRM server per hour

    Input:
    open_data csv: This can be found in Edinburgh's Just Eat site

    Output:
    best_route_region.csv: A csv file with all the required information for Mosel
    i.e. distance, duration, node visits
    '''
    header = "Travelling Salesman Problem (TSP) Combinations"
    print(header)
    print("-"*len(header))
    # Interactive inputs: unbalanced-stations file, fuel price, region number.
    print("Enter the full path csv file location for the unbalanced stations (i.e. C:/Documents/unbalanced.csv)")
    unbalanced = input()
    print("Can I get today's fuel price? Please enter it in pounds per liter")
    fuel = float(input())
    print("What region is this for? Valid values are 1, 2 and 3")
    region = input()
    curDir = os.getcwd()
    unbalancedPath = os.path.abspath(unbalanced)
    unbalancedName = os.path.basename(unbalanced)
    # NOTE(review): Windows-style path with an unescaped backslash ("\s" has
    # no escape meaning, so the literal survives) — fragile on other OSes.
    dictPath = os.path.join(curDir, "raw_data\stationsDic.csv")
    if os.path.isfile(unbalanced) != True:
        print (f"{unbalancedName} does not exist. Try again")
        return
    print("All good. Processing the file... \n")
    # Join the unbalanced stations with the coordinates dictionary.
    stationsDic = pd.read_csv(dictPath)
    stationsDic = stationsDic.drop(columns = ["count"])
    unbalancedStations = pd.read_csv(unbalancedPath)
    tspData = pd.merge(unbalancedStations,stationsDic, how = "inner", on="station_id")
    tspData = tspData.sort_values(by = ["t"])
    # Number of stations to visit per hour slot.
    hours = pd.DataFrame({'stations': tspData.groupby(["t"]).size()}).reset_index()
    tsp = GetBestRoute(tspData, hours, region, fuel)
    return
def NetAdd():
    '''
    Interactively build the per-region net_add .dat initialisation files for
    bike.mos.

    Prompts for the monthly trips CSV, the city-division file and an ISO week
    number, then writes net_add_region{1,2,3}.dat (net_add matrix, initial
    bike counts and station ids) into the current working directory.

    Output: None
    '''
    header = "Net_add script"
    print(header)
    print("-"*len(header))
    print("Enter the full path csv file location (i.e. C:/Documents/08.csv). Press Enter when done.")
    csvFile = input()
    print("Enter the full path txt file location containing the city division (i.e. C:/Documents/split.txt). Press Enter when done.")
    divisionFile = input()
    print("Enter the week number. Consider that there are 52 weeks in a year. Press Enter when done.")
    try:
        # int() raises ValueError on bad input, so validate here; the old
        # `type(weekNumber) != int` check was dead code (always False).
        weekNumber = int(input())
    except ValueError:
        print("The number is invalid. Try again")
        return
    filePath = os.path.abspath(csvFile)
    fileName = os.path.basename(csvFile)
    if not os.path.isfile(csvFile):
        print(f"{fileName} does not exist. Try again")
        return
    if not os.path.isfile(divisionFile):
        print(f"{divisionFile} does not exist. Try again")
        return
    print("All good. Processing the file... \n")
    # Process the csv files
    region1, region2, region3 = Regions(divisionFile)
    latestData = DataProcessing(filePath, weekNumber, tsp = 0)
    netAdd = Dat_NetAdd(latestData)
    # Split the data per region.
    # NOTE(review): Dat_NetAdd returns a pivot table whose station_id is the
    # INDEX, not a column — confirm pd.merge(..., on=["station_id"]) behaves
    # as intended with the pandas version in use.
    netAddR1 = pd.merge(netAdd, region1, how = "inner", on = ["station_id"]).drop(columns = ["station_id"])
    netAddR2 = pd.merge(netAdd, region2, how = "inner", on = ["station_id"]).drop(columns = ["station_id"])
    netAddR3 = pd.merge(netAdd, region3, how = "inner", on = ["station_id"]).drop(columns = ["station_id"])
    # Create the init_bikes arrays
    bikeInitR1 = Dat_BikeInit(netAddR1)
    bikeInitR2 = Dat_BikeInit(netAddR2)
    bikeInitR3 = Dat_BikeInit(netAddR3)
    # One row per region so Mosel reads the station ids as flat arrays.
    stationR1 = pd.DataFrame(region1.values.reshape(1,-1))
    stationR2 = pd.DataFrame(region2.values.reshape(1,-1))
    stationR3 = pd.DataFrame(region3.values.reshape(1,-1))
    # Generate the net_add.dat files for the required timeframe per region
    OutputNet_Add(netAddR1, bikeInitR1, stationR1, 1)
    OutputNet_Add(netAddR2, bikeInitR2, stationR2, 2)
    OutputNet_Add(netAddR3, bikeInitR3, stationR3, 3)
    return
def CityDivision():
    '''
    Interactively build city_division.dat for city_division.mos.

    Prompts for the monthly trips CSV and an ISO week number, computes the
    per-station coordinates and net-flow flags, then folds everything into a
    single Mosel initialisation file in the current working directory.

    Output: None
    '''
    header = "City divisions"
    print(header)
    print("-"*len(header))
    print("Enter the full path csv file location (i.e. C:/Documents/08.csv). Press Enter when done.")
    csvFile = input()
    print("Enter the week number. Consider that there are 52 weeks in a year. Press Enter when done.")
    try:
        # int() raises ValueError on bad input; the previous
        # `type(weekNumber) != int` check was dead code (always False).
        weekNumber = int(input())
    except ValueError:
        print("The number is invalid. Try again")
        return
    curDir = os.getcwd()
    filePath = os.path.abspath(csvFile)
    fileName = os.path.basename(csvFile)
    # (The old code performed this exact existence check twice in a row.)
    if not os.path.isfile(csvFile):
        print(f"{fileName} does not exist. Try again")
        return
    print("All good. Processing the file... \n")
    latestData = DataProcessing(filePath, weekNumber, tsp = 0)
    latitude, longitude, net_flow_in, net_flow_out, station_id = Dat_CityDivision(latestData)
    # One row each, so Mosel reads them as flat arrays.
    latitude = pd.DataFrame(latitude.values.reshape(1,-1))
    longitude = pd.DataFrame(longitude.values.reshape(1,-1))
    net_flow_in = pd.DataFrame(net_flow_in.values.reshape(1,-1))
    net_flow_out = pd.DataFrame(net_flow_out.values.reshape(1,-1))
    station_id = pd.DataFrame(station_id.values.reshape(1,-1))
    # Generate the dat files for the required timeframe
    latitude.to_csv(os.path.join(curDir, "city_division.dat"), index = False, header = False, sep = " ")
    longitude.to_csv(os.path.join(curDir, "longitude.dat"), index = False, header = False, sep = " ")
    net_flow_in.to_csv(os.path.join(curDir, "net_flow_in.dat"), index = False, header = False, sep = " ")
    net_flow_out.to_csv(os.path.join(curDir, "net_flow_out.dat"), index = False, header = False, sep = " ")
    station_id.to_csv(os.path.join(curDir, "station_id.dat"), index = False, header = False, sep = " ")
    # Fold the helper files into city_division.dat, then remove them.
    f1 = open('longitude.dat')
    f2 = open('net_flow_in.dat')
    f3 = open('net_flow_out.dat')
    f4 = open('station_id.dat')
    with open("city_division.dat", "r+") as f:
        old = f.read() # read everything in the file
        longitude = f1.read()
        net_flow_in = f2.read()
        net_flow_out = f3.read()
        station_id = f4.read()
        f.seek(0) # rewind
        f.write("Latitude: [" + old + "]\n") # write the new line before
        f.write("Longitude: [" + longitude + "]\n")
        f.write("net_flow_in: [" + net_flow_in + "]\n")
        f.write("net_flow_out: [" + net_flow_out + "]\n")
        f.write("station_id: [" + station_id + "]")
    f1.close()
    f2.close()
    f3.close()
    f4.close()
    os.remove("longitude.dat")
    os.remove("net_flow_in.dat")
    os.remove("net_flow_out.dat")
    os.remove("station_id.dat")
    print("Created city_division.dat")
    return
def Cli():
    '''
    CLI interface for exploring the different functions.
    '''
    header = "Bike sharing script"
    print(header)
    print(len(header)*"=")
    print("Script options:")
    print("1: Generate the city_division.dat initialization file for city_division.mos")
    print("2: Generate the net_add.dat initialization files per region for bike.mos")
    print("3: Calculate the travelling salesman distances for a defined number of stations using OSRM API")
    print("0: Exit")
    print("Select an option. Press enter when done")
    select = input()
    while True:
        if select == '0':
            print("Ciao!")
            break
        elif select == '1':
            CityDivision()
        elif select == '2':
            NetAdd()
        elif select == '3':
            Tsp()
        elif select == '4':
            # Easter egg: the old code called an undefined Combinations()
            # here and crashed with NameError; keep the option but safe.
            print("Nothing to see here... yet.")
        else:
            # Re-prompt once instead of the old double prompt (the previous
            # code asked for input here AND again at the bottom of the loop).
            print("Invalid selection. Try again using an appropriate selection.")
            select = input()
            continue
        print("Done. Need something else? Select an option from the menu. If not, enter 0")
        select = input()
    return
Cli()
|
d88b994c9ea889c24a3c21abe7a10c3a25a163a9
|
[
"Markdown",
"Python"
] | 2 |
Markdown
|
Ishaanjolly/Bike-ride-sharing-
|
937219aa20ec5914b774f793c567e31f2272d2cf
|
ea31eabb7637983016a15ac92e4a271309aa2d3a
|
refs/heads/master
|
<repo_name>Ingalls12/hangman<file_sep>/ahorcadoSolo.py
import random
def main():
    """Program entry point: show the banner, then play one game."""
    titulo()
    juego()
def juego():
    """Run a single round: pick a secret word and let the player guess it."""
    jug = jugadores()
    lista = ["Ornitorrinco", "Adivinanza", "Estres", "Ventilador", "Modem", "Ensayo", "Buenavista"]
    # random.choice is the idiomatic (and off-by-one-proof) way to pick one
    # item, instead of random.randint(0, len(lista) - 1) indexing.
    palabra = random.choice(lista)
    print(f"Adivina la palabra {jug[1]}:")
    print(adivinar(palabra, jug))
def adivinar(palabra, jug):
    """Main guessing loop: reveal letters of `palabra` until the player wins
    or accumulates 6 failures.

    Args:
        palabra: the secret word.
        jug: (computer_name, player_name) tuple; jug[1] is the human player.

    Returns:
        A win or lose message string.
    """
    palabra = palabra.lower()
    # One "_" placeholder per letter; correct guesses are filled in place.
    nespacios = []
    for a in range(len(palabra)):
        nespacios.append("_")
    num = 0  # failure counter; reaching 6 loses the game
    while (num <6):
        print(plano_juego(num))
        print(nespacios)
        letra = input(f"Dime que letra crees que este en la palabra {jug[1]}:\n").lower()
        if letra in nespacios:
            # NOTE(review): this only detects repeats of *correct* guesses
            # (letters already revealed); repeated wrong guesses just count
            # as new failures below. Confirm this is intended.
            print("ya utilizaste esa letra")
            num += 1
        elif letra in palabra:
            # Reveal every occurrence of the guessed letter.
            for pal in range(len(palabra)):
                if letra == palabra[pal]:
                    nespacios[pal] = palabra[pal]
        else:
            print("Esa letra no esta en la palabra")
            num += 1
        if "_" not in nespacios:
            # Word fully revealed: the player wins.
            nespacios = "".join(nespacios).title()
            print(nespacios)
            return f"Feliciades Ganaste {jug[1]}"
    return f"Perdiste {jug[1]} la palabra era {palabra}"
def titulo():
    """Print the ASCII banner for the game."""
    for linea in ("////////////////", "///AHORCADO/////", "////////////////"):
        print(linea)
def plano_juego(num):
    """Print the gallows drawing matching `num` failures (0-6).

    Stage 0 is drawn inline; stages 1-6 delegate to plano_juego1..6.
    Returns "" — the caller does print(plano_juego(num)), so the return
    value only adds one blank line after the drawing.
    """
    if num == 0:
        print()
        print("\t------------------------")
        print("\t| |")
        print("\t| |")
        print("\t|")
        print("\t|")
        print("\t|")
        print("\t|")
        print("\t|")
        print("\t|")
        print("\t|")
        print("-----------")
    elif num == 1:
        plano_juego1()
    elif num == 2:
        plano_juego2()
    elif num == 3:
        plano_juego3()
    elif num == 4:
        plano_juego4()
    elif num == 5:
        plano_juego5()
    elif num == 6:
        plano_juego6()
    return ""
def plano_juego6():
    # Stage 6 (game over): head, both arms, torso and both legs drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| \ | / ")
    print("\t| \|/ ")
    print("\t| | ")
    print("\t| / \ ")
    print("\t| / \ ")
    print("\t|")
    print("-----------")
def plano_juego5():
    # Stage 5: head, both arms, torso and one leg drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| \ | / ")
    print("\t| \|/ ")
    print("\t| | ")
    print("\t| / ")
    print("\t| / ")
    print("\t|")
    print("-----------")
def plano_juego4():
    # Stage 4: head, both arms and torso drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| \ | / ")
    print("\t| \|/ ")
    print("\t| | ")
    print("\t| ")
    print("\t| ")
    print("\t|")
    print("-----------")
def plano_juego3():
    # Stage 3: head, one arm and torso drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| | / ")
    print("\t| |/ ")
    print("\t| | ")
    print("\t| ")
    print("\t| ")
    print("\t|")
    print("-----------")
def plano_juego2():
    # Stage 2: head and torso drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| | ")
    print("\t| | ")
    print("\t| | ")
    print("\t| ")
    print("\t| ")
    print("\t|")
    print("-----------")
def plano_juego1():
    # Stage 1: only the head drawn.
    print()
    print("\t------------------------")
    print("\t| |")
    print("\t| _")
    print("\t| |_|")
    print("\t| ")
    print("\t| ")
    print("\t| ")
    print("\t| ")
    print("\t| ")
    print("\t|")
    print("-----------")
def jugadores():
    """Return the (computer, human) player names; prompts for the human's."""
    computadora = "Computadora"
    humano = input("Escribe tu nombre jugador: ")
    return computadora, humano
if __name__ == '__main__':
main()
|
867040d3af1a5418cabf675a95ee56e0f8a44fde
|
[
"Python"
] | 1 |
Python
|
Ingalls12/hangman
|
ea737d290b670c0cbd98fdfd457e8587f79e29b3
|
766c96ca747e21bda5b977ac4823374956c5ec82
|
refs/heads/master
|
<repo_name>FaycalHrid/cloudin<file_sep>/CloudIn/src/java/controllers/ProjetController.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package controllers;
import javax.inject.Named;
import javax.enterprise.context.SessionScoped;
import java.io.Serializable;
import bean.Projet;
import java.util.List;
import javax.ejb.EJB;
import services.ProjetFacade;
/**
 * Session-scoped JSF controller exposing {@link Projet} entities to the views.
 *
 * @author asus
 */
@Named(value = "projetController")
@SessionScoped
public class ProjetController implements Serializable {

    /** Project currently selected in the UI. */
    private Projet selected;

    /** Last fetched project list (refreshed on every getItems() call). */
    private List<Projet> items;

    @EJB
    private ProjetFacade ejbFacade;

    /**
     * Creates a new instance of ProjetController
     */
    public ProjetController() {
    }

    public Projet getSelected() {
        return selected;
    }

    public void setSelected(Projet selected) {
        this.selected = selected;
    }

    /**
     * Re-fetches and returns every project from the facade. Note the list is
     * reloaded on each call, not cached.
     */
    public List<Projet> getItems() {
        items = ejbFacade.findAll();
        return items;
    }

    public void setItems(List<Projet> items) {
        this.items = items;
    }

    public ProjetFacade getEjbFacade() {
        return ejbFacade;
    }

    public void setEjbFacade(ProjetFacade ejbFacade) {
        this.ejbFacade = ejbFacade;
    }

    /**
     * @return the number of projects, as a string for direct EL binding
     */
    public String getNombreProjets() {
        return String.valueOf(getItems().size());
    }
}
<file_sep>/cloudin bd à importer.sql
-- phpMyAdmin SQL Dump
-- version 4.7.9
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1:3306
-- Generated: Mon, Nov 05 2018 at 19:15
-- Server version: 5.7.21
-- PHP version: 5.6.35
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `cloudin`
--
-- NOTE(review): all tables use MyISAM, which does NOT enforce foreign keys;
-- the FK_* entries below are plain indexes, so referential integrity must be
-- guaranteed by the application (here: the JPA layer).
-- --------------------------------------------------------
--
-- Table structure for table `equipe` (team)
--
DROP TABLE IF EXISTS `equipe`;
CREATE TABLE IF NOT EXISTS `equipe` (
`ID` bigint(20) NOT NULL,
`NOM` varchar(255) DEFAULT NULL,
PRIMARY KEY (`ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `equipe`
--
INSERT INTO `equipe` (`ID`, `NOM`) VALUES
(1, 'Equipe 1'),
(2, 'Equipe 2');
-- --------------------------------------------------------
--
-- Table structure for table `projet` (project; optionally owned by a team)
--
DROP TABLE IF EXISTS `projet`;
CREATE TABLE IF NOT EXISTS `projet` (
`ID` bigint(20) NOT NULL,
`DATEDEBUT` date DEFAULT NULL,
`DATEFIN` date DEFAULT NULL,
`DESCRIPTION` varchar(255) DEFAULT NULL,
`TITRE` varchar(255) DEFAULT NULL,
`EQUIPE_ID` bigint(20) DEFAULT NULL,
PRIMARY KEY (`ID`),
KEY `FK_PROJET_EQUIPE_ID` (`EQUIPE_ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `repo` (repository; attached to a task)
--
DROP TABLE IF EXISTS `repo`;
CREATE TABLE IF NOT EXISTS `repo` (
`ID` bigint(20) NOT NULL,
`DESCRIPTION` varchar(255) DEFAULT NULL,
`NOM` varchar(255) DEFAULT NULL,
`TACHE_ID` bigint(20) DEFAULT NULL,
PRIMARY KEY (`ID`),
KEY `FK_REPO_TACHE_ID` (`TACHE_ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `sequence` (JPA table-based ID generator state)
--
DROP TABLE IF EXISTS `sequence`;
CREATE TABLE IF NOT EXISTS `sequence` (
`SEQ_NAME` varchar(50) NOT NULL,
`SEQ_COUNT` decimal(38,0) DEFAULT NULL,
PRIMARY KEY (`SEQ_NAME`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `sequence`
--
INSERT INTO `sequence` (`SEQ_NAME`, `SEQ_COUNT`) VALUES
('SEQ_GEN', '100');
-- --------------------------------------------------------
--
-- Table structure for table `solution` (uploaded solution; attached to a repo)
--
DROP TABLE IF EXISTS `solution`;
CREATE TABLE IF NOT EXISTS `solution` (
`ID` bigint(20) NOT NULL,
`DATEPOST` date DEFAULT NULL,
`DESCRIPTION` varchar(255) DEFAULT NULL,
`NOM` varchar(255) DEFAULT NULL,
`TAILLE` double DEFAULT NULL,
`REPO_ID` bigint(20) DEFAULT NULL,
PRIMARY KEY (`ID`),
KEY `FK_SOLUTION_REPO_ID` (`REPO_ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tache` (task; belongs to a project)
--
DROP TABLE IF EXISTS `tache`;
CREATE TABLE IF NOT EXISTS `tache` (
`ID` bigint(20) NOT NULL,
`DATEDEBUT` date DEFAULT NULL,
`DATEFIN` date DEFAULT NULL,
`DESCRIPTION` varchar(255) DEFAULT NULL,
`TITRE` varchar(255) DEFAULT NULL,
`PROJET_ID` bigint(20) DEFAULT NULL,
PRIMARY KEY (`ID`),
KEY `FK_TACHE_PROJET_ID` (`PROJET_ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `utilisateur` (user account)
--
-- NOTE(review): PASSWORD is stored in plain text (see the data below);
-- hash passwords (e.g. bcrypt) before any production use.
DROP TABLE IF EXISTS `utilisateur`;
CREATE TABLE IF NOT EXISTS `utilisateur` (
`ID` bigint(20) NOT NULL AUTO_INCREMENT,
`EMAIL` varchar(255) DEFAULT NULL,
`NOM` varchar(255) DEFAULT NULL,
`PASSWORD` varchar(255) DEFAULT NULL,
`PHOTO` varchar(255) DEFAULT NULL,
`PRENOM` varchar(255) DEFAULT NULL,
`USERNAME` varchar(255) DEFAULT NULL,
PRIMARY KEY (`ID`)
) ENGINE=MyISAM AUTO_INCREMENT=52 DEFAULT CHARSET=latin1;
--
-- Dumping data for table `utilisateur`
--
INSERT INTO `utilisateur` (`ID`, `EMAIL`, `NOM`, `PASSWORD`, `PHOTO`, `PRENOM`, `USERNAME`) VALUES
(1, '<EMAIL>', 'hrid', '123456', NULL, 'faycal', 'faycalhrid'),
(51, '<EMAIL>', 'jallal', '123456', NULL, 'reda', '<PASSWORD>');
-- --------------------------------------------------------
--
-- Table structure for table `utilisateur_equipe` (user/team join table)
--
DROP TABLE IF EXISTS `utilisateur_equipe`;
CREATE TABLE IF NOT EXISTS `utilisateur_equipe` (
`Utilisateur_ID` bigint(20) NOT NULL,
`Equipes_ID` bigint(20) NOT NULL,
PRIMARY KEY (`Utilisateur_ID`,`Equipes_ID`),
KEY `FK_UTILISATEUR_EQUIPE_Equipes_ID` (`Equipes_ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
6fc1431a7dff3074173d6de2089db8445e91d667
|
[
"Java",
"SQL"
] | 2 |
Java
|
FaycalHrid/cloudin
|
9a1b6ddb9f6a104928d971b3d2b45c7ade0557e1
|
cf899716c048c5fbff7be4293811a56318521d35
|
refs/heads/master
|
<repo_name>JCMais/react-suspense-intro<file_sep>/src/api/api.js
import Octokit from '@octokit/rest';
import { delay } from '../utils';
import orgsFixture from './fixtures/orgs.json';
import contributorsFixture from './fixtures/contributors.json';
const api = new Octokit(); // Unauthenticated GitHub REST client (subject to rate limiting).
const DEFAULT_DELAY = 2000; // Artificial latency (ms) so Suspense fallbacks stay visible in the demo.
const IS_MOCK_ENABLED = false; // Flip to true to serve the bundled JSON fixtures instead of calling GitHub.
// Returns the organization's top five repositories, sorted by stargazer
// count (descending). Adds an artificial delay for demo purposes and may
// serve fixture data when mocking is enabled.
export const getOrgRepos = async org => {
  await delay(DEFAULT_DELAY);
  let data;
  if (IS_MOCK_ENABLED) {
    ({ data } = orgsFixture[org]);
  } else {
    ({ data } = await api.repos.listForOrg({ org }));
  }
  const byStarsDesc = (a, b) => b.stargazers_count - a.stargazers_count;
  return data.sort(byStarsDesc).slice(0, 5);
};
// Returns the top five contributors for an "owner/repo" string.
// The GitHub API already returns contributors sorted, so no sort is needed.
export const getRepoContributors = async fullRepoName => {
  await delay(DEFAULT_DELAY);
  const [owner, repo] = fullRepoName.split('/', 2);
  let data;
  if (IS_MOCK_ENABLED) {
    ({ data } = contributorsFixture[`${owner}/${repo}`]);
  } else {
    ({ data } = await api.repos.listContributors({ owner, repo }));
  }
  // data is already sorted
  return data.slice(0, 5);
};
<file_sep>/src/components/repo/RepoList.js
import React from 'react';
import { RepoItem } from './RepoItem';
export function RepoList(props) {
const { onRepoClick, repos } = props;
return (
<ul className="repo-list">
{repos.map(repo => (
<RepoItem key={repo.id} repo={repo} onClick={onRepoClick} />
))}
</ul>
);
}
<file_sep>/src/components/contributor/ContributorItem.js
import React from 'react';
import { createResourceFetcher } from '../../cache';
// Suspense resource that resolves with the image URL once the browser has
// fetched (and cached) the image.
const imageFetcher = createResourceFetcher(src => {
  return new Promise(resolve => {
    const image = new Image();
    // Resolve on success AND on failure: the original only handled onload,
    // so a broken avatar URL left the promise pending forever and the
    // suspended component stuck on its fallback. On error we still resolve
    // with the src; the <img> then shows the browser's broken-image state.
    image.onload = () => resolve(src);
    image.onerror = () => resolve(src);
    image.src = src;
  });
});
// Suspends (via imageFetcher.read) until the avatar image is preloaded,
// then renders a plain <img>. Extra props are spread onto the element.
function ContributorAvatar(props) {
  return <img alt="" {...props} src={imageFetcher.read(props.src)} />;
}
// One contributor row: preloaded avatar plus login name.
// NOTE(review): the `key` on this <li> is redundant — React keys must be set
// where the element is created in a list (the parent already does that);
// confirm and consider removing.
export function ContributorItem(props) {
  const { contributor } = props;
  return (
    <li key={contributor.node_id}>
      <ContributorAvatar
        className="contributor-image"
        alt=""
        src={contributor.avatar_url}
      />
      <span className="contributor-login">{contributor.login}</span>
    </li>
  );
}
<file_sep>/src/components/OrgRepos.js
import React, { Suspense } from 'react';
import { reposFetcher } from '../api/fetchers';
import { RepoContributors } from './RepoContributors';
import { LoadingSpinner } from './LoadingSpinner';
import { RepoList } from './repo/RepoList';
// Reads the repos resource for the org (suspending until the data is
// available) and renders the resulting list.
function RepoListSuspense(props) {
  const data = reposFetcher.read(props.orgName);
  return <RepoList repos={data} onRepoClick={props.onRepoClick} />;
}
// Shows either the org's repo list or, after a repo is clicked, that repo's
// contributors. Both views suspend while loading and share one fallback.
export class OrgRepos extends React.Component {
  state = {
    // Full "owner/repo" name of the repo whose contributors are displayed,
    // or null while the repo list is shown.
    isShowingRepoContributors: null
  };
  onRepoClick = fullRepoName => {
    this.setState({
      isShowingRepoContributors: fullRepoName
    });
  };
  onBack = () => {
    this.setState({
      isShowingRepoContributors: null
    });
  };
  // Chooses between the contributor view (with a Back button) and the list view.
  renderContent() {
    const { isShowingRepoContributors } = this.state;
    return isShowingRepoContributors ? (
      <div>
        <button onClick={this.onBack}>Back</button>
        <br />
        <RepoContributors fullRepoName={isShowingRepoContributors} />
      </div>
    ) : (
      <RepoListSuspense
        orgName={this.props.orgName}
        onRepoClick={this.onRepoClick}
      />
    );
  }
  render() {
    // NOTE(review): maxDuration was an experimental Suspense prop — confirm
    // it has any effect with the React version pinned by this project.
    return (
      <Suspense maxDuration={600} fallback={<LoadingSpinner />}>
        {this.renderContent()}
      </Suspense>
    );
  }
}
<file_sep>/src/components/LoadingSpinner.js
import React from 'react';
import logo from '../logo.svg';
export function LoadingSpinnerImage(props) {
return (
<img
src={logo}
className={`loading-spinner ${props.className}`}
alt="logo"
/>
);
}
// Default Suspense fallback: heading plus the spinning logo.
// Renders LoadingSpinnerImage without a className.
export function LoadingSpinner() {
  return (
    <div>
      <h2>Loading...</h2>
      <LoadingSpinnerImage />
    </div>
  );
}
<file_sep>/src/components/contributor/ContributorList.js
import React from 'react';
import { ContributorItem } from './ContributorItem';
export function ContributorList(props) {
const { data } = props;
return (
<ul className="contributor-list">
{data.map(contributor => {
return (
<ContributorItem key={contributor.id} contributor={contributor} />
);
})}
</ul>
);
}
<file_sep>/src/components/RepoContributors.js
import React from 'react';
import { contributorsFetcher } from '../api/fetchers';
import { ContributorList } from './contributor/ContributorList';
// Reads the contributors resource (suspending until loaded) and renders it.
function ContributorListSuspense(props) {
  const data = contributorsFetcher.read(props.fullRepoName);
  return <ContributorList data={data} />;
}
// Public wrapper: shows the contributors of the given "owner/repo".
export function RepoContributors(props) {
  return <ContributorListSuspense fullRepoName={props.fullRepoName} />;
}
<file_sep>/src/App.js
import React, { Component } from 'react';
import './App.css';
import { OrgRepos } from './components/OrgRepos';
// Root component: an uncontrolled org-name input; submitting stores the name
// in state, which mounts OrgRepos for that org.
class App extends Component {
  state = {
    org: ''
  };
  constructor(props) {
    super(props);
    // Ref to the uncontrolled <input>; read on submit instead of onChange.
    this.inputRef = React.createRef();
  }
  onGetRepos = evt => {
    evt.preventDefault();
    this.setState({
      org: this.inputRef.current ? this.inputRef.current.value : ''
    });
  };
  render() {
    const { org } = this.state;
    return (
      <div className="app-wrapper">
        <h1>React Suspense - Demo</h1>
        <form onSubmit={this.onGetRepos}>
          <input ref={this.inputRef} />
          <button type="submit">Get Repos</button>
        </form>
        {!!org && <OrgRepos orgName={org} />}
      </div>
    );
  }
}
<file_sep>/src/components/repo/RepoItem.js
import React from 'react';
export function RepoItem(props) {
const { onClick, repo } = props;
return <li onClick={() => onClick(repo.full_name)}>{repo.name}</li>;
}
<file_sep>/README.md
# React Suspense Demo
This is a demo I've prepared for a talk at [React Cwb][react-cwb] meetup, that happened on 2018-11-29
There are multiple branches you can compare against to check the diffs.
Master will have the most updated code, using suspense.
[react-cwb]:https://www.meetup.com/pt-BR/ReactJS-CWB/
<file_sep>/src/cache.js
import { unstable_createResource } from 'react-cache';
// Wraps react-cache's unstable resource API so callers depend only on a
// minimal { read } surface instead of the unstable module directly.
export function createResourceFetcher(callback) {
  const resource = unstable_createResource(callback);
  const read = (...args) => resource.read(...args);
  return { read };
}
|
64a202ffbe9f71f9c94fd471fae5e92250db3965
|
[
"JavaScript",
"Markdown"
] | 11 |
JavaScript
|
JCMais/react-suspense-intro
|
3b9e0ef18f6da2ccf4882e66f9604671823aad75
|
a95c7b0a32bcbea7bcd2703e59b9d2af35ff5983
|
refs/heads/master
|
<repo_name>faros/workshop-ui-todo<file_sep>/src/app/facade/todo.facade.spec.ts
import {async, TestBed} from '@angular/core/testing';
import SpyObj = jasmine.SpyObj;
import {TodoService} from '../services/todo.service';
import {SnackBarService} from '../services/snack-bar.service';
import {NEVER, of, throwError} from 'rxjs';
import {Todo} from '../models/todo.model';
import {TodoFacade} from './todo.facade';
// Unit tests for TodoFacade. TodoService and SnackBarService are replaced by
// jasmine spies, so the facade's orchestration logic is tested without HTTP
// or DOM. Many specs are intentionally left as TODO workshop exercises.
describe('TodoFacade', () => {
  let todoService: SpyObj<TodoService>;
  let snackBarService: SpyObj<SnackBarService>;
  let facade: SpyObj<TodoFacade>;
  beforeEach(async(() => {
    // NOTE(review): this local `component` is the configured TestBed module,
    // not a component — the name is misleading; consider renaming.
    const component = TestBed.configureTestingModule({
      providers: [
        {
          provide: TodoService,
          useFactory: () => jasmine.createSpyObj('TodoService', [
            'getTodos', 'createTodo', 'updateTodo', 'deleteTodo'
          ])
        },
        {
          provide: SnackBarService,
          useFactory: () => jasmine.createSpyObj('SnackBarService', [
            'showSnackBar'
          ])
        },
        TodoFacade
      ]
    });
    component.compileComponents();
  }));
  beforeEach(() => {
    // Notice how we are not using fixture here, we don't have to render any HTML/components.
    // We also didn't declare any components in our testingModule
    todoService = TestBed.get(TodoService);
    snackBarService = TestBed.get(SnackBarService);
    facade = TestBed.get(TodoFacade);
  });
  describe('refreshTodos', () => {
    beforeEach(() => {
      // By default a spy returns undefined, to avoid a NPE when the code tries to do
      // todoService.getTodos().subscribe()
      // we return an observable which never completes (and thus we avoid executing the subscribe function).
      todoService.getTodos.and.returnValue(NEVER);
    });
    it('should start loading', () => {
      facade.refreshTodos();
      // Even though you might think observable are async, we can write our tests like this
      // because they are synchronous while testing.
      facade.isLoading().subscribe(isLoading => {
        expect(isLoading).toBeTruthy();
      });
    });
    it('should fetch the todos using the todoService', () => {
      // TODO: implement
    });
    describe('given the call succeeds', () => {
      it('should stop loading', () => {
        // Return an actual value this time.
        todoService.getTodos.and.returnValue(of([]));
        facade.refreshTodos();
        // TODO: implement isLoading check.
      });
      it('should update the todo observables', () => {
        // Return some data this time
        todoService.getTodos.and.returnValue(of([{
          id: '1',
          text: 'text',
          isCompleted: false
        } as Todo]));
        facade.refreshTodos();
        // TODO: similarly to how we tested isLoading, test both the getCompletedTodos and getIncompletedTodos observables.
      });
    });
    describe('given the call fails', () => {
      it('should display a snackBar message', () => {
        todoService.getTodos.and.returnValue(throwError('err'));
        facade.refreshTodos();
        // TODO: check snackBar call.
      });
    });
  });
  describe('getCompletedTodos', () => {
    it('should only return the completed todos', () => {
      // TODO: we kind of already implemented this in the refreshTodo test, but it's not bad to write another test just for this method.
    });
  });
  describe('getIncompleteTodos', () => {
    it('should only return the incomplete todos', () => {
      // TODO: we kind of already implemented this in the refreshTodo test, but it's not bad to write another test just for this method.
    });
  });
  // TODO: the following methods are very similar, you don't have to test them all, but it's good practise nevertheless.
  describe('createTodo', () => {
    let todo: Todo;
    beforeEach(() => {
      todo = {
        text: 'buy',
        isCompleted: false
      };
      todoService.createTodo.and.returnValue(NEVER);
    });
    it('should start loading', () => {
    });
    it('should create the todo using the todoService', () => {
    });
    describe('given the call succeeds', () => {
      it('should refresh the todos', () => {
      });
    });
    describe('given the call fails', () => {
      it('should display a snackBar message', () => {
      });
    });
  });
  describe('updateTodo', () => {
    let todo: Todo;
    beforeEach(() => {
      todo = {
        text: 'buy',
        isCompleted: false
      };
      todoService.updateTodo.and.returnValue(NEVER);
    });
    it('should start loading', () => {
    });
    it('should create the todo using the todoService', () => {
    });
    describe('given the call succeeds', () => {
      it('should refresh the todos', () => {
      });
    });
    describe('given the call fails', () => {
      it('should display a snackBar message', () => {
      });
    });
  });
  describe('deleteTodo', () => {
    let todo: Todo;
    beforeEach(() => {
      todo = {
        text: 'buy',
        isCompleted: false
      };
      todoService.deleteTodo.and.returnValue(NEVER);
    });
    it('should start loading', () => {
    });
    it('should create the todo using the todoService', () => {
    });
    describe('given the call succeeds', () => {
      it('should refresh the todos', () => {
      });
    });
    describe('given the call fails', () => {
      it('should display a snackBar message', () => {
      });
    });
  });
});
<file_sep>/src/app/components/todo/todo.component.ts
import {Component, EventEmitter, Input, Output} from '@angular/core';
import {Todo} from '../../models/todo.model';
/*
* TODO: Display the description when a TODO has one. Should be in the form of "text - description"
* You can use the "todo_description" class to color the text gray.
* TODO: Add an edit and delete button. Use the fa-icon-button component. Icons should be 'edit' and 'delete'.
* Be sure to emit an event for both of these actions.
* And put them in the todo__actions element, this makes it so you need to hover over the component to see the icons.
* */
// Presentational component for a single todo entry: a checkbox bound to
// isCompleted plus the todo text. Edit/delete actions are a file TODO.
@Component({
  selector: 'fa-todo',
  template: `
    <div class='todo'>
      <mat-checkbox
        (change)='toggleCompleted.emit()'
        [checked]='todo.isCompleted'>
        {{ todo.text }}
      </mat-checkbox>
      <div class='todo__actions'>
      </div>
    </div>
  `,
  styleUrls: ['./todo.component.scss']
})
export class TodoComponent {
  // Todo to display; the checkbox mirrors its isCompleted flag.
  @Input() todo: Todo;
  // Emitted (no payload) whenever the user toggles the checkbox.
  @Output() toggleCompleted = new EventEmitter<void>();
}
<file_sep>/src/app/services/todo.service.spec.ts
import {async, TestBed} from '@angular/core/testing';
import SpyObj = jasmine.SpyObj;
import {TodoService} from '../services/todo.service';
import {HttpClientTestingModule, HttpTestingController} from '@angular/common/http/testing';
import {Todo} from '../models/todo.model';
// Interesting article about testing http: https://medium.com/netscape/testing-with-the-angular-httpclient-api-648203820712
// Unit tests for TodoService using Angular's HttpTestingController: no real
// HTTP is performed — each request is intercepted, inspected and flushed.
describe('TodoService', () => {
  let service: SpyObj<TodoService>;
  let httpMock: HttpTestingController;
  beforeEach(async(() => {
    // NOTE(review): this local `component` is the configured TestBed module,
    // not a component — consider renaming.
    const component = TestBed.configureTestingModule({
      imports: [
        HttpClientTestingModule
      ],
      providers: [
        TodoService
      ]
    });
    component.compileComponents();
  }));
  beforeEach(() => {
    service = TestBed.get(TodoService);
    httpMock = TestBed.get(HttpTestingController);
  });
  describe('getTodos', () => {
    it('should return the todos', () => {
      const todos = [{
        id: '1',
        text: 'text',
        description: 'description',
        isCompleted: false
      } as Todo];
      // Call the function we want to test, since its using httpClient (which we mocked with the HttpTestingController)
      // it won't immediately get a value.
      service.getTodos().subscribe(result => {
        expect(result).toEqual(todos);
      });
      // Expect the url to have been called.
      const req = httpMock.expectOne('/todos');
      // Check if the method was GET
      expect(req.request.method).toEqual('GET');
      // Pretend the call was completed by emitting an event, this will be on the observable returned by http.get().
      // After this the observable we subscribe to above, will get a value and we can check if the results match.
      req.flush(todos);
    });
  });
  describe('createTodo', () => {
    it('should create and return the todo', () => {
      // TODO: implement, make sure the request returns a todo with an ID and check the request body.
    });
  });
  describe('update', () => {
    it('should update and return the todo', () => {
      // TODO: implement, make sure the request to check the request body.
    });
  });
  describe('delete', () => {
    it('should delete the todo', () => {
      // TODO: implement, be sure to subscribe, even though we won't be expecting data.
    });
  });
});
<file_sep>/src/app/components/todo/todo.component.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
import {Todo} from '../../models/todo.model';
import {TodoComponent} from './todo.component';
// Component tests for TodoComponent: renders the component via a fixture and
// asserts on the DOM (checkbox state, emitted events). Several specs are
// intentionally left as TODO workshop exercises.
describe('TodoComponent', () => {
  let fixture: ComponentFixture<TodoComponent>;
  let component: TodoComponent;
  let todo: Todo;
  // Some useful selectors (Can also be classes like .todo__actions, or ids #component123)
  const CHECKBOX_SELECTOR = By.css('mat-checkbox');
  const EDIT_BUTTON_SELECTOR = By.css('fa-icon-button[icon="edit"]');
  const DELETE_BUTTON_SELECTOR = By.css('fa-icon-button[icon="delete"]');
  beforeEach(async(() => {
    // Angular setup
    const component = TestBed.configureTestingModule({
      declarations: [
        TodoComponent // declare the component we want to test
      ],
      schemas: [CUSTOM_ELEMENTS_SCHEMA]
    });
    component.compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(TodoComponent); // Create the test fixture we'll use to our component
    component = fixture.debugElement.componentInstance; // Get the instance of our component
    todo = {
      id: '1',
      text: 'text',
      description: 'description',
      isCompleted: false
    };
    component.todo = todo; // Set some mock data on our component
    fixture.detectChanges(); // Re-render template
  });
  it('should show a checkbox that is checked based on the isCompleted flag', () => {
    const checkbox = fixture.debugElement.query(CHECKBOX_SELECTOR); // Find element the checkbox element
    expect(checkbox.properties.checked).toEqual(false); // see if the property checked is false
    todo.isCompleted = true; // Change state
    fixture.detectChanges(); // Re-render template to see change
    expect(checkbox.properties.checked).toEqual(true); // Check that our 2-way binding worked
  });
  it('should show the description if provided', () => {
    const checkbox = fixture.debugElement.query(CHECKBOX_SELECTOR); // Give back an object of the class DebugElement
    // Use the debugElement.nativeElement.innerText to check the text.
    // TODO: implement
  });
  describe('when the user checks the checkbox', () => {
    it('should emit the "toggleCompleted" event', () => {
      spyOn(component.toggleCompleted, 'emit'); // Spy on the emit event
      fixture.debugElement.query(CHECKBOX_SELECTOR).triggerEventHandler('change', undefined);
      // Trigger the change event on the checkbox
      expect(component.toggleCompleted.emit).toHaveBeenCalled(); // Check if our component emited the correct event.
    });
  });
  describe('when the user clicks the edit button', () => {
    it('should emit the "edit" event', () => {
      // TODO: implement
    });
  });
  describe('when the user clicks the delete button', () => {
    it('should emit the "delete" event', () => {
      // TODO: implement
    });
  });
});
<file_sep>/src/app/container/todo-form-dialog/todo-form-dialog.component.ts
import {Component, Inject, OnInit} from '@angular/core';
import {MAT_DIALOG_DATA, MatDialogRef} from '@angular/material';
import {Todo} from '../../models/todo.model';
import {FormGroup} from '@angular/forms';
/*
* TODO: Add form for title (required) and description (optional)
* TODO: Add cancel button
* TODO: Add submit button, make it disabled when the form is invalid.
* TODO: Display "Create todo" when no existing todo is passed to the component, else display "Update todo"
* TODO: When a todo is passed to the component, fill in the form with the data.
*
* Useful documentation:
* https://material.angular.io/components/dialog/overview
* https://angular.io/guide/reactive-forms
* */
// Dialog component for creating or editing a todo. In edit mode the existing
// todo is injected via MAT_DIALOG_DATA and prefills the form; in create mode
// the injected value is undefined. Form controls are a file TODO.
@Component({
  template: `
    <div class='todo-form'>
      <div class='todo-form__form'>
      </div>
    </div>
  `,
  styleUrls: ['./todo-form-dialog.component.scss']
})
export class TodoFormDialogComponent implements OnInit {
  // Reactive form backing the dialog; controls to be added per the file TODOs.
  todoForm = new FormGroup({
  });
  // `todo` is undefined in create mode and populated in edit mode.
  constructor(private dialogRef: MatDialogRef<TodoFormDialogComponent>,
              @Inject(MAT_DIALOG_DATA) private todo: Todo) {
  }
  ngOnInit() {
    // Edit mode: prefill the form with the existing todo's values.
    if (this.todo) {
      this.todoForm.patchValue(this.todo);
    }
  }
}
<file_sep>/src/app/components/icon-button/icon-button.component.ts
import {Component, EventEmitter, Input, Output} from '@angular/core';
import {ThemePalette} from '@angular/material';
// TODO: add hovering so the iconColor changes.
// Reusable borderless button rendering a single Material icon; clicks are
// re-emitted as `iconClick`.
@Component({
  selector: 'fa-icon-button',
  template: `
    <button class='icon-button'
            (click)='iconClick.emit()'>
      <mat-icon [color]='iconColor'>
        {{icon}}
      </mat-icon>
    </button>
  `,
  styleUrls: ['./icon-button.component.scss']
})
export class IconButtonComponent {
  // Material icon name, e.g. 'edit', 'delete', 'add_box'.
  @Input() icon: string;
  // Fired whenever the user clicks the button.
  @Output() iconClick = new EventEmitter<void>();
  // Palette used for the icon; per the file TODO it should change on hover.
  iconColor: ThemePalette = 'primary';
}
<file_sep>/src/app/app.module.ts
import {BrowserModule} from '@angular/platform-browser';
import {NgModule} from '@angular/core';
import {AppRoutingModule} from './app-routing.module';
import {AppComponent} from './app.component';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {
MatButtonModule,
MatCheckboxModule,
MatDialogModule, MatIconModule,
MatInputModule,
MatProgressSpinnerModule, MatSnackBarModule
} from '@angular/material';
import {TodoComponent} from './components/todo/todo.component';
import {TodoService} from './services/todo.service';
import {TodoListComponent} from './components/todo-list/todo-list.component';
import {HttpClientModule} from '@angular/common/http';
import {TodoOverviewContainer} from './container/todo-overview/todo-overview.container';
import {ReactiveFormsModule} from '@angular/forms';
import {NewTodoButtonComponent} from './components/new-todo-button/new-todo-button.component';
import {ColorPickerModule} from 'ngx-color-picker';
import {TodoFormDialogComponent} from './container/todo-form-dialog/todo-form-dialog.component';
import {SnackBarService} from './services/snack-bar.service';
import {IconButtonComponent} from './components/icon-button/icon-button.component';
import {TodoFacade} from './facade/todo.facade';
// Root module: wires up Material UI modules, HTTP, reactive forms and the
// todo components/services. TodoFormDialogComponent appears in
// entryComponents because MatDialog instantiates it dynamically.
@NgModule({
  declarations: [
    AppComponent,
    TodoComponent,
    TodoListComponent,
    TodoOverviewContainer,
    NewTodoButtonComponent,
    TodoFormDialogComponent,
    IconButtonComponent
  ],
  imports: [
    BrowserModule,
    BrowserAnimationsModule,
    AppRoutingModule,
    HttpClientModule,
    MatButtonModule,
    MatCheckboxModule,
    MatProgressSpinnerModule,
    ReactiveFormsModule,
    MatInputModule,
    ColorPickerModule,
    MatDialogModule,
    MatSnackBarModule,
    MatIconModule
  ],
  providers: [
    TodoService,
    TodoFacade,
    SnackBarService
  ],
  bootstrap: [AppComponent],
  entryComponents: [
    TodoFormDialogComponent
  ]
})
export class AppModule {
}
<file_sep>/src/app/components/new-todo-button/new-todo-button.component.ts
import {Component, EventEmitter, Output} from '@angular/core';
import {MatDialog, ThemePalette} from '@angular/material';
import {TodoFormDialogComponent} from '../../container/todo-form-dialog/todo-form-dialog.component';
import {Todo} from '../../models/todo.model';
// Button that opens the create-todo dialog; when the dialog is submitted
// with a value, that value is emitted as `createTodo`.
@Component({
  selector: 'fa-new-todo-button',
  template: `
    <fa-icon-button icon='add_box' (iconClick)='openCreateTodoDialog()'>
    </fa-icon-button>
  `,
  styleUrls: ['./new-todo-button.component.scss']
})
export class NewTodoButtonComponent {
  // Emits the newly entered todo when the dialog is confirmed.
  @Output() createTodo = new EventEmitter<Todo>();
  constructor(private matDialog: MatDialog) {
  }
  openCreateTodoDialog() {
    const dialog = this.matDialog.open(TodoFormDialogComponent);
    dialog.afterClosed().subscribe(todo => {
      // afterClosed emits undefined when the dialog was cancelled; only
      // forward actual submissions.
      if (todo) {
        this.createTodo.emit(todo);
      }
    });
  }
}
<file_sep>/src/app/container/todo-form-dialog/todo-form-dialog.component.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import { NO_ERRORS_SCHEMA} from '@angular/core';
import {Todo} from '../../models/todo.model';
import {TodoFormDialogComponent} from './todo-form-dialog.component';
import {ReactiveFormsModule} from '@angular/forms';
import {MAT_DIALOG_DATA, MatDialogRef} from '@angular/material';
import SpyObj = jasmine.SpyObj;
// Component tests for TodoFormDialogComponent, covering both create mode
// (no MAT_DIALOG_DATA) and edit mode (an existing todo injected). The
// MatDialogRef is a jasmine spy so no real dialog is opened.
describe('TodoFormDialogComponent', () => {
  let fixture: ComponentFixture<TodoFormDialogComponent>;
  let component: TodoFormDialogComponent;
  let dialogRef: SpyObj<MatDialogRef<TodoFormDialogComponent>>;
  let todo: Todo;
  const TITLE_SELECTOR = By.css('h1');
  const CANCEL_SELECTOR = By.css('.todo-form__cancel');
  const SUBMIT_SELECTOR = By.css('.todo-form__submit');
  beforeEach(async(() => {
    // NOTE(review): this local `component` is the configured TestBed module,
    // not a component — consider renaming.
    const component = TestBed.configureTestingModule({
      imports: [
        ReactiveFormsModule
      ],
      providers: [
        {
          provide: MatDialogRef,
          useFactory: () => jasmine.createSpyObj('MatDialogRef', [
            'close'
          ])
        },
        {
          // `todo` is read lazily here, so each describe below can set it
          // before creating the component.
          provide: MAT_DIALOG_DATA,
          useFactory: () => todo
        }
      ],
      declarations: [
        TodoFormDialogComponent
      ],
      schemas: [NO_ERRORS_SCHEMA]
    });
    component.compileComponents();
  }));
  describe('when no todo is passed', () => {
    beforeEach(() => {
      fixture = TestBed.createComponent(TodoFormDialogComponent);
      component = fixture.debugElement.componentInstance;
      dialogRef = TestBed.get(MatDialogRef);
      fixture.detectChanges();
    });
    it('should display an empty form', () => {
      expect(component.todoForm.get('text').value).toEqual('');
      expect(component.todoForm.get('description').value).toEqual('');
    });
    it('should display "Create todo" as the title', () => {
      expect(fixture.debugElement.query(TITLE_SELECTOR).nativeElement.innerText).toEqual('Create todo');
    });
    describe('when the user clicks the cancel button', () => {
      it('should close the dialog', () => {
        fixture.debugElement.query(CANCEL_SELECTOR).triggerEventHandler('click', undefined);
        expect(dialogRef.close).toHaveBeenCalled();
      });
    });
    describe('when the form is invalid', () => {
      it('should disable the submit button', () => {
        expect(fixture.debugElement.query(SUBMIT_SELECTOR).properties.disabled).toBeTruthy();
      });
    });
    describe('when the form is valid', () => {
      it('should enable the submit button', () => {
        component.todoForm.get('text').setValue('text');
        fixture.detectChanges();
        expect(fixture.debugElement.query(SUBMIT_SELECTOR).properties.disabled).toBeFalsy();
      });
    });
    it('should pass the form value to the dialog sumbit button', () => {
      component.todoForm.get('text').setValue('text');
      fixture.detectChanges();
      expect(fixture.debugElement.query(SUBMIT_SELECTOR).properties['mat-dialog-close']).toEqual({
        text: 'text',
        description: ''
      });
    });
  });
  describe('when a todo is passed', () => {
    beforeEach(() => {
      todo = {
        id: '1',
        text: 'text',
        description: 'description',
        isCompleted: true
      };
      fixture = TestBed.createComponent(TodoFormDialogComponent);
      component = fixture.debugElement.componentInstance;
      fixture.detectChanges();
    });
    it('should populate the form with the todo data', () => {
      expect(component.todoForm.get('text').value).toEqual('text');
      expect(component.todoForm.get('description').value).toEqual('description');
    });
    it('should display "Update todo" as the title', () => {
      expect(fixture.debugElement.query(TITLE_SELECTOR).nativeElement.innerText).toEqual('Update todo');
    });
  });
});
<file_sep>/src/app/components/todo-list/todo-list.component.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
import {Todo} from '../../models/todo.model';
import {TodoListComponent} from './todo-list.component';
// Component tests for TodoListComponent: verifies that todos are rendered as
// fa-todo children and that an empty-state text replaces the list when there
// are no todos. Several specs are intentionally left as TODO exercises.
// Fix: the describe label 'when the are todos' is corrected to
// 'when there are todos'.
describe('TodoListComponent', () => {
  let fixture: ComponentFixture<TodoListComponent>;
  let component: TodoListComponent;
  let todos: Array<Todo>;
  const TODO_SELECTOR = By.css('fa-todo');
  const TODO_LIST_SELECTOR = By.css('.todo-list__list');
  const EMPTY_SELECTOR = By.css('.todo-list__empty');
  beforeEach(async(() => {
    // NOTE(review): this local `component` is the configured TestBed module,
    // not a component — consider renaming.
    const component = TestBed.configureTestingModule({
      declarations: [
        TodoListComponent
      ],
      schemas: [CUSTOM_ELEMENTS_SCHEMA]
    });
    component.compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(TodoListComponent);
    component = fixture.debugElement.componentInstance;
    todos = [{
      id: '1',
      text: 'Pizza',
      isCompleted: true
    }, {
      id: '2',
      text: 'Pasta',
      description: 'penne',
      isCompleted: false
    }];
    component.todos = todos;
    fixture.detectChanges();
  });
  describe('when there are todos', () => {
    it('should display all the todos', () => {
      const todoComponents = fixture.debugElement.queryAll(TODO_SELECTOR);
      expect(todoComponents.length).toEqual(2);
      expect(todoComponents[0].properties.todo).toEqual(todos[0]);
      expect(todoComponents[1].properties.todo).toEqual(todos[1]);
    });
    it('should not display the emptyText', () => {
      expect(fixture.debugElement.query(EMPTY_SELECTOR)).toBeNull();
    });
    describe('given the user toggles the completion of a todo', () => {
      it('should emit the "toggleCompleted" event with the todo', () => {
        // TODO: implement
      });
    });
    describe('given the user wants to edit a todo', () => {
      it('should emit the "edit" event with the todo', () => {
        // TODO: implement
      });
    });
    describe('given the user wants to delete a todo', () => {
      it('should emit the "delete" event with the todo', () => {
        // TODO: implement
      });
    });
  });
  describe('when there are no todos', () => {
    beforeEach(() => {
      // TODO: implement setup
      fixture.detectChanges();
    });
    it('should display the emptyText', () => {
      // TODO: implement
    });
    it('should hide the todo list', () => {
      // TODO: implement
    });
  });
});
<file_sep>/src/app/components/new-todo-button/new-todo-button.component.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
import {NewTodoButtonComponent} from './new-todo-button.component';
import {MatDialog, MatDialogRef} from '@angular/material';
import SpyObj = jasmine.SpyObj;
import {TodoFormDialogComponent} from '../../container/todo-form-dialog/todo-form-dialog.component';
import {NEVER, of} from 'rxjs';
import {Todo} from '../../models/todo.model';

// Unit tests for NewTodoButtonComponent: the icon button that opens the
// "new todo" dialog and emits the created todo on submit.
describe('NewTodoButtonComponent', () => {
  let fixture: ComponentFixture<NewTodoButtonComponent>;
  let component: NewTodoButtonComponent;
  let matDialog: SpyObj<MatDialog>;
  let matDialogRef: SpyObj<MatDialogRef<TodoFormDialogComponent>>;
  const ICON_BUTTON_SELECTOR = By.css('fa-icon-button');
  beforeEach(async(() => {
    // Named `testBed` instead of `component`: the original name shadowed the
    // outer `component` variable with a TestBedStatic, which was misleading.
    // Only `open` is stubbed — `afterClosed` is a MatDialogRef method, not a
    // MatDialog one, so stubbing it here was dead code.
    const testBed = TestBed.configureTestingModule({
      providers: [
        {
          provide: MatDialog,
          useFactory: () => jasmine.createSpyObj('MatDialog', [
            'open'
          ])
        }
      ],
      declarations: [
        NewTodoButtonComponent
      ],
      schemas: [CUSTOM_ELEMENTS_SCHEMA]
    });
    testBed.compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(NewTodoButtonComponent);
    component = fixture.debugElement.componentInstance;
    matDialog = TestBed.get(MatDialog);
    matDialogRef = jasmine.createSpyObj('MatDialogRef', ['afterClosed']);
    // By default the dialog never closes; individual tests override this.
    matDialogRef.afterClosed.and.returnValue(NEVER);
    matDialog.open.and.returnValue(matDialogRef);
    fixture.detectChanges();
  });
  it('should the icon-button component with the "add_box" icon', () => {
    expect(fixture.debugElement.query(ICON_BUTTON_SELECTOR).attributes.icon).toEqual('add_box');
  });
  describe('when the user has clicked on the icon', () => {
    beforeEach(() => {
    });
    it('should open the "TodoFormDialogComponent" dialog', () => {
      fixture.debugElement.query(ICON_BUTTON_SELECTOR).triggerEventHandler('iconClick', undefined);
      expect(matDialog.open).toHaveBeenCalledWith(TodoFormDialogComponent);
    });
    describe('given the user submitted the dialog', () => {
      it('should emit the "createTodo" event with the new todo', () => {
        spyOn(component.createTodo, 'emit');
        // Stub the dialog result BEFORE triggering the click, so the
        // afterClosed subscription resolves synchronously via of(...).
        matDialogRef.afterClosed.and.returnValue(of({
          text: 'text',
          description: 'description',
        } as Todo));
        fixture.debugElement.query(ICON_BUTTON_SELECTOR).triggerEventHandler('iconClick', undefined);
        expect(component.createTodo.emit).toHaveBeenCalledWith({
          text: 'text',
          description: 'description',
        } as Todo);
      });
    });
    describe('given the user has cancelled the dialog', () => {
      it('should not emit the "createTodo" event', () => {
        spyOn(component.createTodo, 'emit');
        // A cancelled dialog closes with `undefined`.
        matDialogRef.afterClosed.and.returnValue(of(undefined));
        fixture.debugElement.query(ICON_BUTTON_SELECTOR).triggerEventHandler('iconClick', undefined);
        expect(component.createTodo.emit).not.toHaveBeenCalled();
      });
    });
  });
});
<file_sep>/src/app/facade/todo.facade.ts
import {BehaviorSubject, Observable} from 'rxjs';
import {Todo} from '../models/todo.model';
import {Injectable} from '@angular/core';
import {TodoService} from '../services/todo.service';
/*
 * Facade between the UI containers and the TodoService: holds the current
 * todo list and a loading flag in BehaviorSubjects so every subscriber
 * immediately receives the latest value.
 *
 * Docs: https://www.learnrxjs.io/subjects/behaviorsubject.html (first example)
 *
 * TODO: (getCompletedTodos) Actually only return the completed todos instead of all of them.
 * TODO: (getIncompleteTodos) Implement.
 * TODO: (refreshTodos) Start loading, and stop loading when the call has completed.
 * TODO: (create, update, delete) Implement, when successful, refresh the todos,
 *  when it fails, display a cool snackbar. (Using the SnackBarService)
 *
 * TODO: (BONUS) A lot of the code is the same for edit, update and delete, try making some of the logic
 *  abstract by using a function.
 * */
@Injectable()
export class TodoFacade {
  // Latest known todo list; starts empty and is replaced wholesale by refreshTodos().
  private todos$ = new BehaviorSubject<Array<Todo>>([]);
  // Loading flag exposed via isLoading(); never set to true yet (see TODO above).
  private isLoading$ = new BehaviorSubject<boolean>(false);
  constructor(private todoService: TodoService) {
  }
  // NOTE: despite the name, this currently emits ALL todos (see TODO above).
  getCompletedTodos(): Observable<Array<Todo>> {
    return this.todos$.asObservable();
  }
  // Not implemented yet — part of the workshop exercise.
  getIncompleteTodos(): Observable<Array<Todo>> {
    throw new Error('Not yet implemented');
  }
  isLoading(): Observable<boolean> {
    return this.isLoading$.asObservable();
  }
  // Fetches the todos from the backend and pushes them to all subscribers.
  // NOTE(review): no error handling — a failed request leaves todos$ stale.
  refreshTodos() {
    this.todoService.getTodos()
      .subscribe(todos => {
        this.todos$.next(todos);
      });
  }
  createTodo(todo: Todo) {
    throw new Error('Not yet implemented');
  }
  updateTodo(todo: Todo) {
    throw new Error('Not yet implemented');
  }
  deleteTodo(todo: Todo) {
    throw new Error('Not yet implemented');
  }
}
<file_sep>/src/app/components/todo-list/todo-list.component.ts
import {Component, EventEmitter, Input, Output} from '@angular/core';
import {Todo} from '../../models/todo.model';
/*
 * Presentational list of todos.
 * - Renders one fa-todo per entry and rethrows its events, attaching the
 *   affected Todo as the payload so containers know which item was touched.
 * - When there are no todos, the list is hidden and `emptyText` is shown
 *   instead (matching the component's spec).
 * */
@Component({
  selector: 'fa-todo-list',
  template: `
    <div class='todo-list'>
      <ul class='todo-list__list' *ngIf='todos.length > 0'>
        <li *ngFor='let todo of todos'>
          <fa-todo
            [todo]='todo'
            (toggleCompleted)='toggleCompleted.emit(todo)'
            (edit)='edit.emit(todo)'
            (delete)='delete.emit(todo)'>
          </fa-todo>
        </li>
      </ul>
      <span class='todo-list__empty' *ngIf='todos.length === 0'>
        {{emptyText}}
      </span>
    </div>
  `,
  styleUrls: ['./todo-list.component.scss']
})
export class TodoListComponent {
  @Input() todos: Array<Todo> = [];
  // Message shown when the list is empty.
  @Input() emptyText: string;
  @Output() toggleCompleted = new EventEmitter<Todo>();
  @Output() edit = new EventEmitter<Todo>();
  @Output() delete = new EventEmitter<Todo>();
}
<file_sep>/src/app/container/todo-overview/todo-overview.container.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {DebugElement, NO_ERRORS_SCHEMA} from '@angular/core';
import {ReactiveFormsModule} from '@angular/forms';
import {MatDialog, MatDialogRef} from '@angular/material';
import SpyObj = jasmine.SpyObj;
import {TodoOverviewContainer} from './todo-overview.container';
import {TodoFacade} from '../../facade/todo.facade';
import {NEVER, of} from 'rxjs';
import {Todo} from '../../models/todo.model';
import {TodoFormDialogComponent} from '../todo-form-dialog/todo-form-dialog.component';

// Unit tests for TodoOverviewContainer: list rendering, loading state, and
// the create / toggle / edit / delete flows, all against a mocked facade.
describe('TodoOverviewContainer', () => {
  let fixture: ComponentFixture<TodoOverviewContainer>;
  let component: TodoOverviewContainer;
  let dialog: SpyObj<MatDialog>;
  let todoFacade: SpyObj<TodoFacade>;
  let incompleteTodo: Todo;
  let completedTodo: Todo;
  const TODO_LIST_SELECTOR = By.css('fa-todo-list');
  const SPINNER_SELECTOR = By.css('mat-spinner');
  const NEW_TODO_BUTTON_SELECTOR = By.css('fa-new-todo-button');
  beforeEach(async(() => {
    // Named `testBed` instead of `component`: the original name shadowed the
    // outer `component` variable with a TestBedStatic, which was misleading.
    const testBed = TestBed.configureTestingModule({
      imports: [
        ReactiveFormsModule
      ],
      providers: [
        // Provide the injectable and return mock objects.
        {
          provide: MatDialog,
          useFactory: () => jasmine.createSpyObj('MatDialog', [
            'open'
          ])
        },
        {
          provide: TodoFacade,
          useFactory: () => jasmine.createSpyObj('TodoFacade', [
            'refreshTodos', 'createTodo', 'updateTodo', 'deleteTodo', 'isLoading',
            'getIncompleteTodos', 'getCompletedTodos'
          ])
        }
      ],
      declarations: [
        TodoOverviewContainer
      ],
      schemas: [NO_ERRORS_SCHEMA]
    });
    testBed.compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(TodoOverviewContainer);
    component = fixture.debugElement.componentInstance;
    dialog = TestBed.get(MatDialog);
    todoFacade = TestBed.get(TodoFacade);
    incompleteTodo = {
      id: '2',
      text: 'Pizza',
      isCompleted: false
    } as Todo;
    todoFacade.getIncompleteTodos.and.returnValue(of([incompleteTodo])); // Don't forget to return a observable.
    completedTodo = {
      id: '1',
      text: 'text',
      description: 'description',
      isCompleted: true
    } as Todo;
    todoFacade.getCompletedTodos.and.returnValue(of([completedTodo]));
    fixture.detectChanges();
  });
  describe('when initializing the component', () => {
    it('should refresh the todos', () => {
      expect(todoFacade.refreshTodos).toHaveBeenCalled();
    });
  });
  it('should display all the todos', () => {
    fixture.detectChanges();
    const todoLists = fixture.debugElement.queryAll(TODO_LIST_SELECTOR);
    expect(todoLists[0].properties.todos).toEqual([{
      id: '2',
      text: 'Pizza',
      isCompleted: false
    } as Todo]);
    // TODO: check second list.
  });
  describe('when loading', () => {
    beforeEach(() => {
      todoFacade.isLoading.and.returnValue(of(true));
      fixture.detectChanges();
    });
    it('should show the spinner', () => {
      // TODO: implement
    });
    it('should hide the todo list', () => {
      // TODO: implement
    });
  });
  describe('when the user creates a todo', () => {
    it('should create the todo using the todoFacade', () => {
      // TODO: implement, trigger button even with mock button.triggerEventHandler(method, data);
      // TODO: Check todoFacade createTodo call.
    });
  });
  describe('when the user toggles a todo', () => {
    it('should toggle and update the todo using the todoFacade', () => {
      /**
       * TODO:
       * get all todo lists on the page
       * trigger toggleCompleted event on the first list with incompleteTodo as the parameter.
       * check if todoFacade.updateTodo method had been called with correct param.
       * reset updateTodo calls (todoFacade.updateTodo.calls.reset();)
       * trigger toggleCompleted event on second list with completeTodo as the parameter.
       * Check updateTodo again
       */
    });
  });
  describe('when the user edits a todo', () => {
    let dialogRef: SpyObj<MatDialogRef<TodoFormDialogComponent>>;
    let todoLists: Array<DebugElement>;
    beforeEach(() => {
      dialogRef = jasmine.createSpyObj('MatDialogRef', ['afterClosed']);
      dialogRef.afterClosed.and.returnValue(NEVER); // By default, never call the afterClosed event
      dialog.open.and.returnValue(dialogRef);
      todoLists = fixture.debugElement.queryAll(TODO_LIST_SELECTOR);
    });
    it('should show the "TodoFormDialogComponent" dialog with the given todo', () => {
      todoLists[0].triggerEventHandler('edit', incompleteTodo);
      expect(dialog.open).toHaveBeenCalledWith(TodoFormDialogComponent, {
        data: {
          id: '2',
          text: 'Pizza',
          isCompleted: false
        } as Todo
      });
      dialog.open.calls.reset();
      todoLists[1].triggerEventHandler('edit', completedTodo);
      expect(dialog.open).toHaveBeenCalledWith(TodoFormDialogComponent, {
        data: {
          id: '1',
          text: 'text',
          description: 'description',
          isCompleted: true
        } as Todo
      });
    });
    describe('given the user has submitted the dialog', () => {
      it('should update the todo using the todoFacade', () => {
        // Change the afterClosed function so it returns an observable which will immediately output a TodoModel.
        // We do this BEFORE the user clicks on the edit button, because we have to setup our stubbed functions
        // before the edit code is ran.
        dialogRef.afterClosed.and.returnValue(of({
          text: 'cool',
          description: 'neat'
        } as Todo));
        // Simulate the edit
        todoLists[0].triggerEventHandler('edit', incompleteTodo);
        // After this line the afterClosed subscription will resolve because of the of({...}) stubbing we did earlier.
        expect(todoFacade.updateTodo).toHaveBeenCalledWith({
          id: '2',
          text: 'cool',
          description: 'neat',
          isCompleted: false
        } as Todo);
      });
    });
    describe('given the user has cancelled the dialog', () => {
      it('should not update the todo using the todoFacade', () => {
        /**
         * TODO:
         * Make afterClosed return an observable which outputs undefined (the user cancelled the dialog)
         * Click on the edit button.
         * Check if the updateTodo function has not been called.
         */
      });
    });
  });
  describe('when the user deletes a todo', () => {
    it('should delete the todo using the todoFacade', () => {
      /**
       * TODO:
       * get all todo lists on the page
       * trigger delete event on the first list with incompleteTodo as the parameter.
       * check if todoFacade.deleteTodo method had been called with correct param.
       * reset delete calls (todoFacade.deleteTodo.calls.reset();)
       * trigger delete event on second list with completeTodo as the parameter.
       * Check deleteTodo again
       */
    });
  });
});
<file_sep>/src/app/services/snack-bar.service.ts
import {Injectable} from '@angular/core';
import {MatSnackBar} from '@angular/material';

/**
 * Thin wrapper around Angular Material's snack bar that always attaches a
 * "Close" dismiss action to the shown message.
 */
@Injectable()
export class SnackBarService {
  private static readonly DISMISS_ACTION = 'Close';

  constructor(private readonly snackBar: MatSnackBar) {
  }

  showSnackBar(message: string): void {
    this.snackBar.open(message, SnackBarService.DISMISS_ACTION);
  }
}
<file_sep>/README.md
# Todo
You can find the full app on the `solution` branch.
Every file has a list of TODO's that need to be implemented.
The best way to get started is to implement the following files, from top to bottom:
* todo.component.ts
* todo-list.component.ts
* todo-overview.container.ts
* todo.facade.ts
* todo-form-dialog.ts
Try and implement at least a couple of tests per component.
Be sure to read the already implemented tests, they serve as a useful example for the unimplemented tests.
Tests are using [jasmine](https://jasmine.github.io/tutorials/your_first_suite).
[More information about angular tests](https://angular.io/guide/testing#component-test-basics)
Lastly, if you only want to run a specific test, or set of tests, use `fdescribe` instead of `describe` and `fit` instead of `it`.
Don't forget to revert them afterwards, though!
Since a lot of tests fail by default, try using `fdescribe` on the first describe in your test file, to only run those tests.
## Starting the app
Run `npm run start` (or `yarn start`). Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.
## Starting the database
Run `npm run startdb` (or `yarn startdb`).
## Running unit tests
Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).
<file_sep>/src/app/container/todo-overview/todo-overview.container.ts
import {Component, OnInit} from '@angular/core';
import {Todo} from '../../models/todo.model';
import {MatDialog} from '@angular/material';
import {TodoFormDialogComponent} from '../todo-form-dialog/todo-form-dialog.component';
import {TodoFacade} from '../../facade/todo.facade';
/*
 * Smart container for the todo overview:
 * - shows the incomplete and completed lists side by side,
 * - shows a mat-spinner (and hides the todo section) while the facade
 *   reports that it is loading,
 * - wires the new-todo button and the list events through to the facade.
 * */
@Component({
  selector: 'fa-todo-overview',
  template: `
    <div class='todo-overview'>
      <h1>Todo list</h1>
      <mat-spinner *ngIf='todoFacade.isLoading() | async'></mat-spinner>
      <ng-container *ngIf='!(todoFacade.isLoading() | async)'>
        <h2 class='todo-overview__header'>
          Todo
          <fa-new-todo-button (createTodo)='createTodo($event)'></fa-new-todo-button>
        </h2>
        <fa-todo-list
          [todos]='todoFacade.getIncompleteTodos() | async'
          (toggleCompleted)='toggleCompleted($event)'
          (edit)='editTodo($event)'
          (delete)='deleteTodo($event)'>
        </fa-todo-list>
        <h2 class='todo-overview__header'>
          Done
        </h2>
        <fa-todo-list
          [todos]='todoFacade.getCompletedTodos() | async'
          (toggleCompleted)='toggleCompleted($event)'
          (edit)='editTodo($event)'
          (delete)='deleteTodo($event)'>
        </fa-todo-list>
      </ng-container>
    </div>
  `,
  styleUrls: ['./todo-overview.container.scss']
})
export class TodoOverviewContainer implements OnInit {
  // `todoFacade` is public because the template reads it directly.
  constructor(private matDialog: MatDialog,
              public todoFacade: TodoFacade) {
  }
  ngOnInit() {
    // Kick off the initial load as soon as the component is up.
    this.todoFacade.refreshTodos();
  }
  createTodo(todo: Todo) {
    this.todoFacade.createTodo(todo);
  }
  toggleCompleted(todo: Todo) {
    // Flip the completion flag and persist it through the facade.
    todo.isCompleted = !todo.isCompleted;
    this.todoFacade.updateTodo(todo);
  }
  editTodo(todo: Todo) {
    const dialog = this.matDialog.open(TodoFormDialogComponent, {
      data: todo
    });
    dialog.afterClosed().subscribe(editedTodo => {
      // A cancelled dialog closes with `undefined` — do nothing then.
      if (editedTodo) {
        this.todoFacade.updateTodo({
          ...todo,
          ...editedTodo
        });
      }
    });
  }
  deleteTodo(todo: Todo) {
    this.todoFacade.deleteTodo(todo);
  }
}
<file_sep>/src/app/services/todo.service.ts
import {Injectable} from '@angular/core';
import {Todo} from '../models/todo.model';
import {Observable} from 'rxjs';
import {HttpClient} from '@angular/common/http';

/**
 * REST client for the todo backend: CRUD against the /todos resource.
 */
@Injectable()
export class TodoService {
  private readonly baseUrl = '/todos';

  constructor(private readonly http: HttpClient) {
  }

  /** Fetch every todo. */
  getTodos(): Observable<Array<Todo>> {
    return this.http.get<Array<Todo>>(this.baseUrl);
  }

  /** Persist a brand-new todo. */
  createTodo(todo: Todo): Observable<Todo> {
    return this.http.post<Todo>(this.baseUrl, todo);
  }

  /** Replace the todo identified by `todo.id`. */
  updateTodo(todo: Todo): Observable<Todo> {
    return this.http.put<Todo>(`${this.baseUrl}/${todo.id}`, todo);
  }

  /** Remove the todo identified by `todo.id`. */
  deleteTodo(todo: Todo): Observable<void> {
    return this.http.delete<void>(`${this.baseUrl}/${todo.id}`);
  }
}
<file_sep>/src/app/components/icon-button/icon-button.component.spec.ts
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {IconButtonComponent} from './icon-button.component';
import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';

// Unit tests for IconButtonComponent: icon rendering, hover color swap,
// and the iconClick output.
describe('IconButtonComponent', () => {
  let fixture: ComponentFixture<IconButtonComponent>;
  let component: IconButtonComponent;
  const MAT_ICON_SELECTOR = By.css('mat-icon');
  const BUTTON_SELECTOR = By.css('button');
  beforeEach(async(() => {
    // Named `testBed` instead of `component`: the original name shadowed the
    // outer `component` variable with a TestBedStatic, which was misleading.
    const testBed = TestBed.configureTestingModule({
      declarations: [
        IconButtonComponent
      ],
      schemas: [CUSTOM_ELEMENTS_SCHEMA]
    });
    testBed.compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(IconButtonComponent);
    component = fixture.debugElement.componentInstance;
    fixture.detectChanges();
  });
  it('should display the given icon', () => {
    component.icon = 'add';
    fixture.detectChanges();
    expect(fixture.debugElement.query(MAT_ICON_SELECTOR).nativeElement.innerText).toEqual('add');
  });
  it('should have an icon with the "primary" color', () => {
    expect(fixture.debugElement.query(MAT_ICON_SELECTOR).properties.color).toEqual('primary');
  });
  describe('when hovering over the button', () => {
    beforeEach(() => {
      fixture.debugElement.query(BUTTON_SELECTOR).triggerEventHandler('mouseenter', undefined);
      fixture.detectChanges();
    });
    it('should change the icon color to "accent"', () => {
      expect(fixture.debugElement.query(MAT_ICON_SELECTOR).properties.color).toEqual('accent');
    });
    describe('given the user stops hovering', () => {
      it('should change the icon color back to "primary"', () => {
        fixture.debugElement.query(BUTTON_SELECTOR).triggerEventHandler('mouseleave', undefined);
        fixture.detectChanges();
        expect(fixture.debugElement.query(MAT_ICON_SELECTOR).properties.color).toEqual('primary');
      });
    });
  });
  describe('when the user clicks the button', () => {
    it('should emit the iconClick event', () => {
      spyOn(component.iconClick, 'emit');
      fixture.debugElement.query(BUTTON_SELECTOR).triggerEventHandler('click', undefined);
      expect(component.iconClick.emit).toHaveBeenCalled();
    });
  });
});
<file_sep>/src/app/services/snack-bar.service.spec.ts
import {async, TestBed} from '@angular/core/testing';
import SpyObj = jasmine.SpyObj;
import {SnackBarService} from './snack-bar.service';
import {MatSnackBar} from '@angular/material';

// Unit tests for SnackBarService against a mocked MatSnackBar.
describe('SnackBarService', () => {
  let snackBar: SpyObj<MatSnackBar>;
  let service: SnackBarService;
  beforeEach(async(() => {
    // Named `testBed` instead of `component`: this is a service test and the
    // value is a TestBedStatic, so `component` was doubly misleading.
    const testBed = TestBed.configureTestingModule({
      providers: [
        {
          provide: MatSnackBar,
          useFactory: () => jasmine.createSpyObj('MatSnackBar', ['open'])
        },
        SnackBarService
      ]
    });
    testBed.compileComponents();
  }));
  beforeEach(() => {
    service = TestBed.get(SnackBarService);
    snackBar = TestBed.get(MatSnackBar);
  });
  describe('showSnackBar', () => {
    it('should open a snackbar with the given message', () => {
      service.showSnackBar('help');
      expect(snackBar.open).toHaveBeenCalledWith('help', 'Close');
    });
  });
});
|
d88a6b7d48c668479bc0e3e77659956119cfb086
|
[
"Markdown",
"TypeScript"
] | 20 |
TypeScript
|
faros/workshop-ui-todo
|
930c262c300f9f8703cccf9696a54778e2f08c0f
|
d6f733bb9fb2e6deac84c8f8995d4c09a6021ffa
|
refs/heads/master
|
<repo_name>unnaidan/ui-unn<file_sep>/redux/auth/actions.js
import {
    SET_USER,
    SET_ACCESS_TOKEN
} from './actionTypes'

// Action creator: stores the authenticated user in the auth state.
function setUser(user) {
    return {
        type: SET_USER,
        payload: {
            user
        }
    }
}

// Action creator: stores the API access token in the auth state.
function setAccessToken(accessToken) {
    return {
        type: SET_ACCESS_TOKEN,
        payload: {
            accessToken
        }
    }
}

export {
    setUser,
    setAccessToken
}
<file_sep>/hooks/useCachedResources.js
import * as React from 'react'
import * as Font from 'expo-font'
import * as SplashScreen from 'expo-splash-screen'

/**
 * Hook that keeps the Expo splash screen up while the Nunito font family
 * loads, then hides it. Returns `true` once loading has finished — whether
 * or not it succeeded, since errors are deliberately swallowed.
 */
const useCachedResources = () => {
    const [
        complete,
        setComplete
    ] = React.useState(false)
    // Run the font loading exactly once, on mount.
    React.useEffect(() => {
        loadFonts()
    }, [])
    const loadFonts = async () => {
        try {
            // NOTE(review): the returned promise is not awaited; if the splash
            // screen auto-hides before this resolves the call is a no-op.
            // Expo recommends calling this before the component renders — confirm.
            SplashScreen.preventAutoHideAsync()
            await Font.loadAsync({
                'Nunito-ExtraLight': require('./../assets/fonts/Nunito-ExtraLight.ttf'),
                'Nunito-ExtraLightItalic': require('./../assets/fonts/Nunito-ExtraLightItalic.ttf'),
                'Nunito-Light': require('./../assets/fonts/Nunito-Light.ttf'),
                'Nunito-LightItalic': require('./../assets/fonts/Nunito-LightItalic.ttf'),
                'Nunito-Regular': require('./../assets/fonts/Nunito-Regular.ttf'),
                'Nunito-Italic': require('./../assets/fonts/Nunito-Italic.ttf'),
                'Nunito-SemiBold': require('./../assets/fonts/Nunito-SemiBold.ttf'),
                'Nunito-SemiBoldItalic': require('./../assets/fonts/Nunito-SemiBoldItalic.ttf'),
                'Nunito-Bold': require('./../assets/fonts/Nunito-Bold.ttf'),
                'Nunito-BoldItalic': require('./../assets/fonts/Nunito-BoldItalic.ttf'),
                'Nunito-ExtraBold': require('./../assets/fonts/Nunito-ExtraBold.ttf'),
                'Nunito-ExtraBoldItalic': require('./../assets/fonts/Nunito-ExtraBoldItalic.ttf'),
                'Nunito-Black': require('./../assets/fonts/Nunito-Black.ttf'),
                'Nunito-BlackItalic': require('./../assets/fonts/Nunito-BlackItalic.ttf')
            })
        } catch (err) {
            // Best effort: a failed font load should not block app start-up.
        } finally {
            // Always flip the flag and hide the splash, even on failure.
            setComplete(true)
            SplashScreen.hideAsync()
        }
    }
    return complete;
}

export default useCachedResources
// @flow
import * as React from 'react'
import * as ReactNative from 'react-native'
type Props = {
status: 'default' | 'hint',
category: 'h1' | 'h2' | 'h3' | 'h4' | 'h5' | 'h6' | 's1' | 's2' | 'p1' | 'p2' | 'caption'
}
const categories = ReactNative.StyleSheet.create({
h1: {
fontFamily: 'Nunito-Bold',
fontSize: 36
},
h2: {
fontFamily: 'Nunito-Bold',
fontSize: 32
},
h3: {
fontFamily: 'Nunito-Bold',
fontSize: 30
},
h4: {
fontFamily: 'Nunito-Bold',
fontSize: 26
},
h5: {
fontFamily: 'Nunito-Bold',
fontSize: 22
},
h6: {
fontFamily: 'Nunito-Bold',
fontSize: 18
},
s1: {
fontFamily: 'Nunito-Bold',
fontSize: 16
},
s2: {
fontFamily: 'Nunito-Bold',
fontSize: 14
},
p1: {
fontFamily: 'Nunito-Regular',
fontSize: 16
},
p2: {
fontFamily: 'Nunito-Regular',
fontSize: 14
},
caption: {
fontFamily: 'Nunito-Regular',
fontSize: 12
}
})
const statuses = ReactNative.StyleSheet.create({
default: {
color: '#353A3F'
},
hint: {
color: '#9A9D9F'
}
})
const StyledText = (props: Props) => {
const {
category,
status,
style: propStyle,
...other
} = props
const style = ReactNative.StyleSheet.compose(categories[category], statuses[status])
return (
<ReactNative.Text
{...other}
style={[
style,
propStyle
]}
/>
)
}
StyledText.defaultProps = {
category: 'p2',
status: 'default'
}
export default StyledText<file_sep>/screens/index.js
// Barrel file: re-export every screen's default export from one module.
export { default as Explore } from './Explore'
export { default as ForgotPassword } from './ForgotPassword'
export { default as ForgotPasswordVerify } from './ForgotPasswordVerify'
export { default as Home } from './Home'
export { default as Login } from './Login'
export { default as Profile } from './Profile'
export { default as Register } from './Register'
export { default as RegisterVerify } from './RegisterVerify'
// @flow
import * as React from 'react'
import * as ReactNative from 'react-native'
import * as VectorIcons from '@expo/vector-icons'
import Text from './Text'
const { AntDesign } = VectorIcons
const {
StyleSheet,
View
} = ReactNative
type Props = {
children: string,
icon: string
}
const styles = StyleSheet.create({
alert: {
flexDirection: 'row',
alignItems: 'center',
padding: 8,
paddingHorizontal: 16,
borderRadius: 4,
backgroundColor: 'rgba(15, 76, 129, 0.08)'
},
alertContent: {
color: '#0F4C81'
},
icon: {
marginRight: 16
},
text: {
flex: 1
}
})
const Alert = (props: Props) => {
const {
children,
icon,
style: propStyle,
...other
} = props
return (
<View
{...other}
style={[
styles.alert,
propStyle
]}
>
{icon &&
<AntDesign
name={icon}
size={24}
style={[
styles.alertContent,
styles.icon
]}
/>
}
<Text
category="p2"
style={[
styles.alertContent,
styles.text
]}
>
{children}
</Text>
</View >
)
}
export default Alert<file_sep>/contants/theme.js
// Design tokens for the app: shape, typography scale, and color palette.
// NOTE(review): the fontWeight values here are numbers, but React Native
// text styles expect string weights ('400', '700', ...). None of the common
// components visible in this repo read this theme (they use fontFamily
// directly), so confirm how/where it is consumed before relying on it.
export default {
    shape: {
        borderRadius: 4,
    },
    // Typography scale: h1-h6 headings, s1/s2 subtitles, p1/p2 paragraphs, caption.
    typography: {
        h1: {
            fontWeight: 700,
            fontSize: 36
        },
        h2: {
            fontWeight: 700,
            fontSize: 32
        },
        h3: {
            fontWeight: 700,
            fontSize: 30
        },
        h4: {
            fontWeight: 700,
            fontSize: 26
        },
        h5: {
            fontWeight: 700,
            fontSize: 22
        },
        h6: {
            fontWeight: 700,
            fontSize: 18
        },
        s1: {
            fontWeight: 700,
            fontSize: 16
        },
        s2: {
            fontWeight: 700,
            fontSize: 14
        },
        p1: {
            fontWeight: 400,
            fontSize: 16
        },
        p2: {
            fontWeight: 400,
            fontSize: 14
        },
        caption: {
            fontWeight: 400,
            fontSize: 12
        }
    },
    // Semantic colors; `main` plus a contrast color for text placed on it.
    colors: {
        divider: {
            main: '#E6E6E7'
        },
        text: {
            default: '#02090F',
            hint: '#9A9D9F',
            control: '#FFFFFF'
        },
        primary: {
            main: '#0F4C81',
            contrastText: '#FFFFFF'
        },
        basic: {
            main: '#F6F6F6',
            contrastText: '#02090F'
        },
        success: {
            main: '#0F842F',
            contrastText: '#FFFFFF'
        },
        info: {
            main: '#1A4AB2',
            contrastText: '#FFFFFF'
        },
        warning: {
            main: '#E27900',
            contrastText: '#FFFFFF'
        },
        error: {
            main: '#9B1515',
            contrastText: '#FFFFFF'
        }
    }
}
// @flow
import * as React from 'react'
import {
StyleSheet,
View
} from 'react-native'
import Header from './Header'
import HeaderAction from './HeaderAction'
import Text from './Text'
type Props = {
onPressBack: () => void,
title: string,
}
const styles = StyleSheet.create({
title: {
...StyleSheet.absoluteFill,
alignItems: 'center',
justifyContent: 'center'
}
})
const SimpleHeader = (props: Props) => {
const {
onPressBack,
title
} = props
return (
<Header>
<View style={styles.title}>
<Text category="h6">
{title}
</Text>
</View>
{onPressBack &&
<HeaderAction
icon="arrowleft"
onPress={onPressBack}
/>
}
</Header>
)
}
export default SimpleHeader<file_sep>/screens/Home.js
import React from 'react'
import {
    SafeAreaView,
    ScrollView,
    StyleSheet,
    View
} from 'react-native'
import {
    Alert as AlertComponent,
    Avatar,
    Container,
    Divider,
    Input,
    SimpleHeader,
    Text
} from './../components/common'

// Spacing used by the showcase sections below.
const styles = StyleSheet.create({
    title: {
        marginTop: 32,
        marginBottom: 8
    },
    subtitle: {
        marginTop: 16,
        marginBottom: 8
    },
    item: {
        marginVertical: 8
    },
    // Row layout with negative margins compensated by each child's margin.
    horizontals: {
        flexDirection: 'row',
        alignItems: 'center',
        marginHorizontal: -8
    },
    horizontal: {
        margin: 8
    }
})

// Component-gallery screen: renders one example of every common component
// (typography categories, alert, avatar sizes, divider, input sizes).
// `props` is currently unused.
const Home = props => {
    return (
        <React.Fragment>
            <SimpleHeader title="Нүүр" />
            <ScrollView keyboardShouldPersistTaps="handled">
                <SafeAreaView>
                    <Container>
                        <Text
                            category="s1"
                            style={styles.title}
                        >
                            Typography
                        </Text>
                        <Text
                            category="s2"
                            status="hint"
                            style={styles.subtitle}
                        >
                            Category
                        </Text>
                        <Text category="h1">
                            H1
                        </Text>
                        <Text category="h2">
                            H2
                        </Text>
                        <Text category="h3">
                            H3
                        </Text>
                        <Text category="h4">
                            H4
                        </Text>
                        <Text category="h5">
                            H5
                        </Text>
                        <Text category="h6">
                            H6
                        </Text>
                        <Text category="s1">
                            S1
                        </Text>
                        <Text category="s2">
                            S2
                        </Text>
                        <Text category="p1">
                            P1
                        </Text>
                        <Text category="p2">
                            P2
                        </Text>
                        <Text category="caption">
                            caption
                        </Text>
                        <Text
                            category="s1"
                            style={styles.title}
                        >
                            Alert
                        </Text>
                        <Text
                            category="s2"
                            status="hint"
                            style={styles.subtitle}
                        >
                            Icon
                        </Text>
                        <AlertComponent
                            icon="bells"
                            style={styles.item}
                        >
                            Lorem ipsum dolor sit amet, consectetur adipiscing elit.
                        </AlertComponent>
                        <Text
                            category="s1"
                            style={styles.title}
                        >
                            Avatar
                        </Text>
                        <Text
                            category="s2"
                            status="hint"
                            style={styles.subtitle}
                        >
                            Size
                        </Text>
                        <View style={styles.horizontals}>
                            <Avatar
                                size="tiny"
                                style={styles.horizontal}
                                source={require('./../assets/images/avatar.jpg')}
                            />
                            <Avatar
                                size="small"
                                style={styles.horizontal}
                                source={require('./../assets/images/avatar.jpg')}
                            />
                            <Avatar
                                size="medium"
                                style={styles.horizontal}
                                source={require('./../assets/images/avatar.jpg')}
                            />
                            <Avatar
                                size="large"
                                style={styles.horizontal}
                                source={require('./../assets/images/avatar.jpg')}
                            />
                            <Avatar
                                size="giant"
                                style={styles.horizontal}
                                source={require('./../assets/images/avatar.jpg')}
                            />
                        </View>
                        <Text
                            category="s1"
                            style={styles.title}
                        >
                            Divider
                        </Text>
                        <Divider style={styles.item} />
                        <Text
                            category="s1"
                            style={styles.title}
                        >
                            Input
                        </Text>
                        <Text
                            category="s2"
                            status="hint"
                            style={styles.subtitle}
                        >
                            Size
                        </Text>
                        <Input
                            style={styles.item}
                            placeholder="Small"
                            size="small"
                        />
                        <Input
                            style={styles.item}
                            placeholder="Medium"
                            size="medium"
                        />
                        <Input
                            style={styles.item}
                            placeholder="Large"
                            size="large"
                        />
                    </Container >
                </SafeAreaView >
            </ScrollView >
        </React.Fragment >
    )
}

export default Home
// @flow
import * as React from 'react'
import * as ReactNative from 'react-native'
type Props = {
size: 'tiny' | 'small' | 'medium' | 'large' | 'giant'
}
const styles = ReactNative.StyleSheet.create({
avatar: {
aspectRatio: 1,
borderRadius: 100
}
})
const sizes = ReactNative.StyleSheet.create({
tiny: {
width: 24,
height: 24
},
small: {
width: 32,
height: 32
},
medium: {
width: 40,
height: 40
},
large: {
width: 48,
height: 48
},
giant: {
width: 56,
height: 56
}
})
const Avatar = (props: Props) => {
const {
size,
style: propStyle,
...other
} = props
const style = ReactNative.StyleSheet.compose(styles.avatar, sizes[size])
return (
<ReactNative.Image
{...other}
style={[
style,
propStyle
]}
/>
)
}
Avatar.defaultProps = {
size: 'medium'
}
export default Avatar<file_sep>/components/common/HeaderAction.js
// @flow
import * as React from 'react'
import { AntDesign } from '@expo/vector-icons'
import {
StyleSheet,
TouchableWithoutFeedback
} from 'react-native'
type Props = {
icon: string
}
const styles = StyleSheet.create({
icon: {
color: '#353A3F'
}
})
const HeaderAction = (props: Props) => {
const {
icon,
...other
} = props
return (
<TouchableWithoutFeedback {...other}>
<AntDesign
name={icon}
size={24}
style={styles.icon}
/>
</TouchableWithoutFeedback>
)
}
export default HeaderAction<file_sep>/components/common/Header.js
// @flow
import * as React from 'react'
import Constants from 'expo-constants'
import { StatusBar } from 'expo-status-bar'
import {
StyleSheet,
View
} from 'react-native'
const styles = StyleSheet.create({
statusBar: {
height: Constants.statusBarHeight
},
header: {
position: 'relative',
height: 44,
paddingHorizontal: 16,
flexDirection: 'row',
alignItems: 'center',
borderBottomWidth: StyleSheet.hairlineWidth,
borderColor: '#E6E6E7'
}
})
const Header = (props) => {
const {
children,
style,
...other
} = props
return (
<React.Fragment>
<View style={styles.statusBar}></View>
<StatusBar
barStyle="default"
translucent
/>
<View
{...other}
style={[
styles.header,
style
]}
>
{children}
</View>
</React.Fragment>
)
}
export default Header<file_sep>/components/common/Input.js
// @flow
import * as React from 'react'
import * as ReactNative from 'react-native'
type Props = {
size: 'small' | 'medium' | 'large'
}
const styles = ReactNative.StyleSheet.create({
input: {
fontFamily: 'Nunito-Regular',
fontSize: 14,
borderBottomWidth: 1,
borderColor: '#E6E6E7',
color: '#353A3F'
}
})
const sizes = ReactNative.StyleSheet.create({
small: {
height: 32
},
medium: {
height: 40
},
large: {
height: 48
}
})
const StyledTextInput = (props: Props) => {
const {
size,
style: propStyle,
...other
} = props
const style = ReactNative.StyleSheet.compose(styles.input, sizes[size])
return (
<ReactNative.TextInput
{...other}
placeholderTextColor="#9A9D9F"
style={[
style,
propStyle
]}
/>
)
}
StyledTextInput.defaultProps = {
size: 'medium'
}
export default StyledTextInput<file_sep>/components/common/index.js
// Barrel file: re-export every common component's default export.
export { default as Alert } from './Alert'
export { default as Avatar } from './Avatar'
export { default as Container } from './Container'
export { default as Divider } from './Divider'
export { default as Header } from './Header'
export { default as HeaderAction } from './HeaderAction'
export { default as Input } from './Input'
export { default as SimpleHeader } from './SimpleHeader'
export { default as Spacer } from './Spacer'
export { default as Text } from './Text'
// @flow
import * as React from 'react'
import * as ReactNative from 'react-native'
const styles = ReactNative.StyleSheet.create({
container: {
padding: 16
}
})
const Container = (props) => {
const {
style,
...other
} = props
return (
<ReactNative.View
{...other}
style={[
styles.container,
style
]}
/>
)
}
export default Container<file_sep>/redux/auth/actionTypes.js
// Redux action type constants for the auth slice (see redux/auth/actions.js).
export const SET_USER = 'SET_USER'
export const SET_ACCESS_TOKEN = 'SET_ACCESS_TOKEN'
|
9b1ea6bc7d9670235d14f898f4db9463b8599b9b
|
[
"JavaScript"
] | 15 |
JavaScript
|
unnaidan/ui-unn
|
75dd8afe2a3a461b8b785e9228d971e56478dd59
|
d4a55709b3afd22915e9de4cc55456f676acc177
|
refs/heads/master
|
<repo_name>hazemkhaledmohamed/IMGshare<file_sep>/imgshare.sql
-- phpMyAdmin SQL Dump
-- version 4.5.1
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1
-- Generation Time: Jun 23, 2017 at 11:45 PM
-- Server version: 10.1.19-MariaDB
-- PHP Version: 7.0.13
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `imgshare`
--
-- --------------------------------------------------------
--
-- Table structure for table `comments`
--
-- Comments left by users on images. Each row links one user (user_id)
-- to one image (image_id); the foreign keys are added later in the dump.
CREATE TABLE `comments` (
  `id` int(10) UNSIGNED NOT NULL,
  `user_id` int(10) UNSIGNED NOT NULL,
  `image_id` int(10) UNSIGNED NOT NULL,
  `text` text COLLATE utf8_unicode_ci NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `comments`
--
INSERT INTO `comments` (`id`, `user_id`, `image_id`, `text`, `created_at`, `updated_at`) VALUES
(3, 1, 4, 'commant for hmed ', '2017-06-21 00:36:50', '2017-06-21 00:36:50'),
(4, 1, 5, 'hi man', '2017-06-21 00:37:01', '2017-06-21 00:37:01'),
(5, 1, 4, 'this is good', '2017-06-21 00:48:56', '2017-06-21 00:48:56'),
(6, 1, 5, 'can you feel free to test this game ', '2017-06-21 00:49:11', '2017-06-21 00:49:11'),
(7, 1, 6, 'awesome image man', '2017-06-21 01:15:44', '2017-06-21 01:15:44'),
(8, 1, 6, '', '2017-06-21 01:24:45', '2017-06-21 01:24:45'),
(9, 1, 5, 'this is ahmed comments', '2017-06-23 12:37:33', '2017-06-23 12:37:33'),
(10, 3, 7, 'this is very nice image', '2017-06-23 12:44:08', '2017-06-23 12:44:08'),
(11, 3, 8, 'this is kube not baine', '2017-06-23 13:17:07', '2017-06-23 13:17:07');
-- --------------------------------------------------------
--
-- Table structure for table `images`
--
-- Uploaded images. `url` stores the generated file name on disk;
-- `user_id` is the uploader (FK added later in the dump).
CREATE TABLE `images` (
  `id` int(10) UNSIGNED NOT NULL,
  `user_id` int(10) UNSIGNED NOT NULL,
  `text` text COLLATE utf8_unicode_ci NOT NULL,
  `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
  `url` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `images`
--
INSERT INTO `images` (`id`, `user_id`, `text`, `title`, `url`, `created_at`, `updated_at`) VALUES
(4, 1, 'this is description', 'this is me ahmed', '63736.jpg', '2017-06-21 00:35:25', '2017-06-21 00:35:25'),
(5, 1, 'this is soso', 'Sameh ', '80818.jpg', '2017-06-21 00:36:37', '2017-06-21 00:36:37'),
(6, 1, 'this is mekki', 'Ahmed mekki', '96891.jpg', '2017-06-21 01:15:29', '2017-06-21 01:15:29'),
(7, 3, 'this is my daughter comming from pokemon', 'My daughter', '44023.jpg', '2017-06-23 12:43:41', '2017-06-23 12:43:41'),
(8, 3, 'this is my boy bine', 'My Boy', '36268.jpg', '2017-06-23 13:16:51', '2017-06-23 13:16:51');
-- --------------------------------------------------------
--
-- Table structure for table `migrations`
--
CREATE TABLE `migrations` (
`id` int(10) UNSIGNED NOT NULL,
`migration` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`batch` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2014_10_12_000000_create_users_table', 1),
(2, '2014_10_12_100000_create_password_resets_table', 1),
(13, '2017_06_20_172123_create_images_table', 2),
(14, '2017_06_20_172558_create_comments_table', 2);
-- --------------------------------------------------------
--
-- Table structure for table `password_resets`
--
CREATE TABLE `password_resets` (
`email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`token` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
-- Registered accounts. `password` holds a bcrypt hash;
-- `remember_token` backs Laravel's "remember me" login.
CREATE TABLE `users` (
  `id` int(10) UNSIGNED NOT NULL,
  `name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
  `email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
  `password` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
  `remember_token` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `users`
--
-- Bug fix: row id 3 was missing the opening quote before the (redacted)
-- password value, which made the whole statement a syntax error.
INSERT INTO `users` (`id`, `name`, `email`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES
(1, 'aa-<PASSWORD>', '<EMAIL>', '$2y$10$EqVEkrFRjj<PASSWORD>.RcJJr.1ZMDq/8TVh0SEMk0MqSjvNfgODvIuBe', 'olB4ZkIeP1hSCd8I2LPeH0wuzZExY05Z0upEawENrQ3M3wID9LN5k94bkcIx', '2017-06-20 15:20:57', '2017-06-23 12:39:23'),
(3, 'ah<NAME>', '<EMAIL>', '<PASSWORD>', 'YXrI6JTZiLxyQX49jeTCxhpZv9qweB1JB1dmO4aoApHMLfZ4CeYs3xadYrNI', '2017-06-23 12:42:21', '2017-06-23 12:54:25');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `comments`
--
ALTER TABLE `comments`
ADD PRIMARY KEY (`id`),
ADD KEY `comments_user_id_foreign` (`user_id`),
ADD KEY `comments_image_id_foreign` (`image_id`);
--
-- Indexes for table `images`
--
ALTER TABLE `images`
ADD PRIMARY KEY (`id`),
ADD KEY `images_user_id_foreign` (`user_id`);
--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
ADD KEY `password_resets_email_index` (`email`),
ADD KEY `password_resets_token_index` (`token`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_email_unique` (`email`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `comments`
--
ALTER TABLE `comments`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;
--
-- AUTO_INCREMENT for table `images`
--
ALTER TABLE `images`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=15;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `comments`
--
ALTER TABLE `comments`
ADD CONSTRAINT `comments_image_id_foreign` FOREIGN KEY (`image_id`) REFERENCES `images` (`id`) ON DELETE CASCADE,
ADD CONSTRAINT `comments_user_id_foreign` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`);
--
-- Constraints for table `images`
--
ALTER TABLE `images`
ADD CONSTRAINT `images_user_id_foreign` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`);
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/app/Http/Controllers/ImagesController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Image;
use Carbon\Carbon;
use DB;
class ImagesController extends Controller
{
    /**
     * Require authentication for every action except the public
     * listing and detail pages.
     */
    public function __construct()
    {
        $this->middleware('auth', ['except' => ['index', 'show']]);
    }

    /**
     * Display a listing of all images.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $images = Image::all();

        return view('images.index', compact('images'));
    }

    /**
     * Search images by title (substring match on the `s` query parameter)
     * and show the results.
     *
     * @return \Illuminate\Http\Response
     */
    public function search()
    {
        $word = \Request::get('s');
        $images = DB::table('images')
            ->where('title', 'like', '%'.$word.'%')
            ->get();

        return view('images.search', compact('images'));
    }

    /**
     * Show the form for uploading a new image.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        return view('images.create');
    }

    /**
     * Store a newly uploaded image on disk and record it in the database.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        if (! $request->file('url')->isValid()) {
            // Bug fix: the redirect that followed this return was
            // unreachable; report the invalid upload and stop.
            return 'File is Not Valid';
        }

        $destinationPath = 'img'; // upload path
        $extension = $request->file('url')->getClientOriginalExtension();
        $url = rand(11111, 99999).'.'.$extension; // random name to avoid clashes
        $request->file('url')->move($destinationPath, $url);

        $data = $request->all();
        $data['url'] = $url;
        Image::create($data);

        return \Redirect::route('images.index');
    }

    /**
     * Display a single image.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show($id)
    {
        $image = Image::find($id);

        return view('images.show', compact('image'));
    }

    /**
     * Delete an image record.
     *
     * Bug fix: this previously returned the `images.index` view directly,
     * without the $images collection that view requires; redirect to the
     * index route instead (consistent with store()).
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        $item = Image::findOrFail($id);
        $item->delete();

        return \Redirect::route('images.index');
    }
}
<file_sep>/app/Http/Controllers/HomeController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Image;
use Auth;
use DB;
class HomeController extends Controller
{
    /**
     * Require authentication for every action except the public index page.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('auth', ['except' => 'index']);
    }

    /**
     * Render the landing page with a paginated list of images.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $images = Image::paginate(4);

        return view('welcome', compact('images'));
    }

    /**
     * Render the profile page with the images uploaded by the
     * currently authenticated user.
     *
     * @return \Illuminate\Http\Response
     */
    public function profile()
    {
        $currentUserId = Auth::user()->id;
        $images = DB::table('images')->where('user_id', $currentUserId)->get();

        return view('profile', compact('images'));
    }
}
<file_sep>/README.md
# imgshare
is free and open source project for uploading, sharing images
# technologies
-Laravel 5.3<br>
-bootstrap template<br>
# install
-install XAMPP or WAMP to run PHP 7 <br>
-create your database and update the .env file with your database credential <br>
-run the migration to test the project using this command `php artisan migrate` <br>
-run the website using this command `php artisan serve` <br>
# troubleshoot
-if you have any problem with missing files, run the command `composer dump-autoload`.<br>
-if you have any problem with a missing CSRF_FIELD, reload the page and try again.
|
baea314222a816acab2078dcf7108aca1e90450a
|
[
"Markdown",
"SQL",
"PHP"
] | 4 |
SQL
|
hazemkhaledmohamed/IMGshare
|
22ba2674c5ac557d0ea32d40b5e29cc9924839bf
|
7b3594c968daac041a74992343b3cc55d2fcfc86
|
refs/heads/master
|
<file_sep>import React from "react";
import HomeImg from "../../public/assets/images/home_bg.jpg";
import {
BrowserRouter as Router,
Link
} from "react-router-dom";
class Home extends React.Component{
render(){
const bgStyle = {
backgroundImage: `url(${HomeImg})`
}
return(
<>
<div className="home" >
<div className="head">
<div className="title">UI<span className="title-sub">What can we make?</span></div>
<Link to="/elements"><div className="sub"><span>Explore</span></div></Link>
</div>
<div className="home-footer">
<a href="https://github.com/aktcode/ui" target="blank">Github</a>
</div>
</div>
</>
)
}
}
export default Home;<file_sep>import React from "react";
/**
* This is the info section, it can be used to display info about ui components
* Props to pass : 'infoContent' and that's it!
* Caution: Only set simple text or basic html markdown as we are using dangerouslySetInnerHTML
*/
class Info extends React.Component{
constructor(props){
super(props);
}
setInfo(){
if(this.props.infoContent){
return {__html: this.props.infoContent};
}
return {__html: "None"};
}
render(){
return(
<div className={"info " +(this.props.infoContent ? '' : 'disNone')}>
<div id="aboutText">
<span dangerouslySetInnerHTML={this.setInfo()}></span>
</div>
<div id="about">About</div>
</div>
)
}
}
export default Info;<file_sep>import React, { Component} from "react";
import Info from "../info.js";
class Form extends Component{
constructor(props){
super(props);
this.state= {
descriptionAvailable : true
}
}
render(){
return(
<div className="formContainer">
<div className={"form" + this.props.variant}>
<span id="head">Log<span id="in">in</span></span>
<input type="text" id="inp1" placeholder="User"></input>
<br />
<input type="password" id="inp2" placeholder="<PASSWORD>"></input>
<br />
<span id="submit">Submit</span>
</div>
<Info infoContent={this.props.formInfo} />
</div>
)
}
}
export default Form;<file_sep># UI components
Visit the [current version](https://aktcode.github.io/ui/public/)
### UI components that can be useful and fun.
Building various UI components or basically anything on a codebase that is minimal and easy to work on. The project aims to compile various components/objects that one could imagine, by keeping things both integrated(to reduce unnecessary keeping up) and isolated(to easily pull out components) simultaneously. The components have been distributed into categories and the <b>Others</b> category is the one to look out as it can contain anything.

[WIKI](https://github.com/aktcode/ui/wiki)
<file_sep>import React, { Component} from "react";
import Card from "./other/card.js";
import Info from "./info.js";
import Phone from "./other/phone.js";
class Other extends Component{
constructor(props){
super(props);
}
render(){
let otherStage;
switch(this.props.variant){
case 'Card':
otherStage = <Card />
break;
case "Phone":
otherStage = <Phone />
break;
default:
otherStage = <div>DEFAULT</div>
}
return(
<div className="header">
{otherStage}
<Info infoContent={this.props.othersInfo} />
</div>
)
}
}
export default Other;<file_sep>import React, { Component} from "react";
import CardLogo from "../../../public/assets/images/card_bg.svg";
import Data from '../../data/main.js';
class Card extends Component{
render(){
const bgContStyle = {
backgroundImage: `url(${CardLogo})`
};
return(
<div className="cardContainer">
<span className="card">
<span className="bgCont" style={bgContStyle}></span>
<span className="top-right">Bank</span>
<span className="details">
<span className="number">{Data.dummy.get("cardNumber")}
<span className="expiry">{Data.dummy.get("cardExpiry")}</span>
</span>
<span className="name">{Data.dummy.get("name")}</span>
{/* <img src={CardLogo} /> */}
</span>
</span>
</div>
)
}
}
export default Card;<file_sep>import React, { Component} from "react";
import Data from '../../data/main.js';
import PhoneBg from "../../../public/assets/images/phone_bg.jpg";
// Phone lock-screen mock-up: background photo, clock, notification count
// and a stack of placeholder "shimmer" bars.
class Phone extends Component {
    constructor(props) {
        super(props);
        // One entry per shimmer bar; the values double as React keys.
        this.state = {
            totalShims: [1, 2, 3, 4, 5]
        }
    }
    render() {
        // Inline style so the bundled background image URL resolves.
        const bgContStyle = {
            backgroundImage: `url(${PhoneBg})`
        };
        return (
            <div className="phoneContainer">
                <div className="phone">
                    <span className="bgCont" style={bgContStyle}></span>
                    <span className="details">
                        <span className="time">12 : 45</span>
                        <span className="more">12 Notifications</span>
                        <span className="shimBars">
                            {this.state.totalShims.map((item) => {
                                return <span className="shim" key={item}></span>
                            })}
                        </span>
                    </span>
                    <span className="swipe-up">
                        <span>Swipe up</span></span>
                </div>
            </div>
        )
    }
}
export default Phone;<file_sep>import React, { Component} from "react"
import Form from "./forms/form.js"
import Other from "./other.js"
import Data from '../data/main.js'
import Search from "./search/search.js"
import {
BrowserRouter as Router,
Switch,
Route,
Link
} from "react-router-dom";
// Sidebar + stage controller. State tracks which category occupies the
// stage and, per category, which variant is selected and its info text.
class Sidebar extends Component {
    constructor(props) {
        super(props);
        /*
        ** How to create a new category of components : -> (like form,search...)
        ** 1. Add the category name to components array
        ** 2. Create a new object with the same name(with three entities-> elements,variant,info)
        ** 3. For info, create an entry with again the same name in data/main.js in [ variant : 'the info text'] format. This step
        ** is not important for every sub component. But the main entry is important!
        */
        this.state = {
            // Category currently shown on the stage.
            stageOccupier: 'form',
            // Category names; each must match a same-named state object below.
            components: ['form', 'search', 'other'],
            form: {
                elements: [1, 2, 3],
                variant: 1,
                info: Data.form.get(1)
            },
            other: {
                elements: ['Card', "Phone"],
                variant: 'Card',
                info: Data.other.get('Card')
            },
            search: {
                elements: [1, 2],
                variant: 1,
                info: Data.search.get(1)
            }
        }
    }
    componentDidMount() {
    }
    // Renders one category header plus its variant list.
    // `uniqueKey` is the category name; `componentObject` is the matching
    // state entry. Clicking the header puts that category on stage;
    // clicking a variant replaces the category's state entry (computed key).
    addComponent(uniqueKey, componentObject) {
        return (
            <>
                <span className={"eleHead " + (this.state.stageOccupier == uniqueKey ? 'active' : '')} onClick={() => this.setState({stageOccupier: uniqueKey})}>{uniqueKey.charAt(0).toUpperCase() + uniqueKey.slice(1)}</span>
                <ul className={this.state.stageOccupier != uniqueKey ? 'disNone' : ''}>
                    { componentObject.elements.map((item) => {
                        // Fresh state entry selecting this variant and its info text.
                        let currentComponent = {
                            elements: componentObject.elements,
                            variant: item,
                            info: Data[uniqueKey].get(item)
                        }
                        return (<li id={(item == componentObject.variant && this.state.stageOccupier == uniqueKey) ? 'active' : ''} onClick={() => this.setState({[uniqueKey]: currentComponent})} key={item}>{item}</li>)
                    })
                    }
                </ul>
            </>
        )
    }
    render() {
        // Pick the stage component for the active category.
        let stage;
        switch (this.state.stageOccupier) {
            case 'form':
                stage = <Form variant={this.state.form.variant} formInfo={this.state.form.info} />
                break;
            case 'other':
                stage = <Other variant={this.state.other.variant} othersInfo={this.state.other.info}/>
                break;
            case 'search':
                stage = <Search variant={this.state.search.variant} searchInfo={this.state.search.info} />
        }
        return (
            <>
                <div className="sidebar">
                    <Link to="/"><span className="head">Elements</span></Link>
                    <div className="ele">
                        { this.state.components.map((componentName) => {
                            return this.addComponent(componentName, this.state[componentName])
                        })
                        }
                    </div>
                </div>
                {/* STAGE */}
                {stage}
                {/* STAGE */}
            </>
        )
    }
}
export default Sidebar;
|
7412e89151fb6d42ec6dbf25ca0fbdf35dbad941
|
[
"JavaScript",
"Markdown"
] | 8 |
JavaScript
|
aktcode/ui
|
77408fabd3b3620a2545a59c5125cfb206e490c5
|
49f03cafb0184913ef18f836089e65c800ccb1e2
|
refs/heads/master
|
<repo_name>ICTU/stomp-api<file_sep>/index.js
var StompLib = require('./stomp-lib.js')
var express = require('express')
var bodyParser = require('body-parser')

var app = express()
app.use(bodyParser.json())

// Subscribe to a queue/topic so incoming messages start buffering.
app.put('/queues/subscriptions', function (req, res) {
  StompLib.subscribe(process.env.host, process.env.port, req.body.queue, 'queue');
  return res.json(req.body);
})
app.put('/topics/subscriptions', function (req, res) {
  StompLib.subscribe(process.env.host, process.env.port, req.body.topic, 'topic');
  return res.json(req.body);
})

// Publish req.body.message to a queue/topic.
app.put('/queues/:queue*', function (req, res) {
  StompLib.publishMessage(process.env.host, process.env.port, req.params.queue, 'queue', req.body.message);
  return res.json(req.body);
})
app.put('/topics/:topic*', function (req, res) {
  StompLib.publishMessage(process.env.host, process.env.port, req.params.topic, 'topic', req.body.message);
  return res.json(req.body);
})

// Return every buffered message and clear the buffer.
app.get('/queues/:queue*', function (req, res) {
  return res.json(getMessages(req.params.queue));
})
app.get('/topics/:topic', function (req, res) {
  return res.json(getMessages(req.params.topic));
})

// Pop the most recent buffered message.
// Bug fix: StompLib.pop delivers its result through a callback and returns
// undefined, so the previous `.then(...)` on the queue route threw a
// TypeError at request time; both routes now use the callback form.
app.get('/queues/:queue/pop', function (req, res) {
  StompLib.pop(req.params.queue, function (result) {
    return res.json(result);
  });
})
app.get('/topics/:topic/pop', function (req, res) {
  StompLib.pop(req.params.topic, function (result) {
    return res.json(result);
  });
})

app.listen(3000, function () {
  console.log('Listening on port 3000!')
})

// Snapshot the buffer for `destination`, then clear it.
function getMessages(destination) {
  var messages = StompLib.getMessages(destination);
  StompLib.flush(destination);
  return messages;
}
<file_sep>/stomp-lib.js
var Stomp = require('stomp-client');
var stomp = require('stompy');
var JSONPath = require('jsonpath-plus');
var Q = require('q')
var subscriptions = {};
var publishers = {};
var context = {};
// Builds a STOMP destination path, e.g. ('test', 'queue') -> '/queue/test'.
function getDestination(destination, destinationType) {
    return ['', destinationType, destination].join('/');
}
// Publishes `message` to one queue/topic, lazily creating and caching a
// stompy client per destination+type pair (retryOnClosed keeps it alive).
function publishMessage(host, port, destination, destinationType, message) {
    var id = destination + ':' + destinationType;
    var publisher = publishers[id] = publishers[id] || stomp.createClient(
        {
            host: host,
            port: port,
            retryOnClosed: true,
        });
    // Builds the '/<type>/<name>' path inline (same shape as getDestination).
    publisher.publish('/' + destinationType + '/' + destination, message);
}
// Subscribes to a destination and buffers every incoming message in
// context.__STOMP__[destination]. Idempotent per destination name.
function subscribe(host, port, destination, destinationType) {
    var dest = getDestination(destination, destinationType);
    if (subscriptions[destination]) {
        return;
    }
    var subscription = subscriptions[destination] = new Stomp(host, port);
    // Bug fix: `msgctx` was assigned without `var`, leaking an implicit
    // global; it is now a proper local.
    var msgctx = context.__STOMP__ = context.__STOMP__ || {};
    msgctx[destination] = msgctx[destination] || [];
    subscription.connect(function (sessionId) {
        subscription.subscribe(dest, function (body, headers) {
            var msg = {
                body: body,
                headers: headers
            };
            msgctx[destination].push(msg);
        });
    });
}
// Unsubscribes, disconnects and forgets the client for a destination.
// Bug fix: the body previously indexed `subscriptions` with an undefined
// `topic` variable, so every call threw a ReferenceError; subscriptions
// are keyed by `destination` (see subscribe()).
function unsubscribe(destination, destinationType) {
    var dest = getDestination(destination, destinationType);
    subscriptions[destination].unsubscribe(dest);
    subscriptions[destination].disconnect();
    subscriptions[destination] = null;
}
// Returns the buffered messages for a destination
// (undefined if subscribe() was never called for it).
function getMessages(destination) {
    return context.__STOMP__[destination];
}

// Removes the newest buffered message and delivers it via `callback`.
// Note: no return value — callers must use the callback.
function pop(destination, callback) {
    var result = context.__STOMP__[destination].pop();
    callback(result);
}

// Clears the message buffer for a destination.
function flush(destination) {
    context.__STOMP__[destination] = [];
}
module.exports = {
'subscribe': subscribe,
'unsubscribe': unsubscribe,
'getMessages': getMessages,
'flush': flush,
'publishMessage': publishMessage,
'pop': pop
};<file_sep>/README.md
# stomp-api
A RESTful interface for a STOMP queue (e.g. ActiveMQ).
=====
## Disclaimer
Do not use this api for production, it is built for testing purposes (i.e. no proper exception handling).
## How does it work
You can subscribe to a queue or a topic, publish (put) messages and show all received messages.
You need to specify the following environment variables:
```
host=<hostname of the queue>
port=<the STOMP port>
```
## Running in docker
```
docker build -t stomp-api .
docker run -it --rm -p 3000:3000 --name stomp-api-service -e host=<queue host> -e port=<stomp port> stomp-api
```
## API by example
To subscribe to a queue (this will start buffering all incoming messages):
PUT the following application/json to URL: localhost:3000/queues/subscriptions
```
{
"queue": "test"
}
```
To publish to a queue:
PUT the following application/json to URL: localhost:3000/queues/test
```
{
"message": "Test message"
}
```
To get all messages received (since subscribing) from a queue:
GET the following URL: localhost:3000/queues/test
This will empty the message buffer.
These examples also apply for topics.
|
f4de934d3348ae65a388f523b9982039f229b763
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
ICTU/stomp-api
|
2d9651e5e3197923746446b221877780871db982
|
b90a5e055da42b2415f7d090acb71318885771f6
|
refs/heads/master
|
<repo_name>castorfaener/JSCourse<file_sep>/Javascript/Hola-mundo/js/14-Ejercicio4.js
'use strict'

/*
  Shows the odd numbers strictly between two numbers entered by the user.
*/

var numero1 = 0;
var numero2 = 0;

// Re-prompt until the input parses as a number.
do {
    numero1 = parseInt(prompt("Introduce el primer número"));
} while (isNaN(numero1))

do {
    numero2 = parseInt(prompt("Introduce el segundo número"));
} while (isNaN(numero2))

if (numero1 == numero2) {
    alert("Los numeros introducidos son iguales");
} else {
    // Math.min/Math.max replaces the two duplicated if/else branches.
    var min = Math.min(numero1, numero2);
    var max = Math.max(numero1, numero2);

    document.write("<h1>Los numeros impares entre " + min + " y " + max + " son:</h1>");
    for (var i = min + 1; i < max; i++) {
        // Bug fix: `i % 2 == 1` missed negative odd numbers, because in
        // JavaScript (-3) % 2 === -1; testing "not even" covers both signs.
        if (i % 2 !== 0) {
            document.write(i);
            document.write("<br/>");
        }
    }
}
<file_sep>/Javascript/Hola-mundo/js/25-Arrays.js
'use strict'

// Arrays: creation, indexed access, length, and iteration.

var nombres = ["<NAME>","<NAME>","<NAME>",52,true];

console.log(nombres);        // the whole array
console.log(nombres[1]);     // second element (index 1)
console.log(nombres.length); // number of elements

/*
Interactive variant, kept for reference:
var elemento = parseInt(prompt("¿Que elemento del array quieres?",0));
if(elemento>=nombres.length){
    alert("Introduce un numero inferior a " +nombres.length);
}else{
    alert(nombres[elemento]);
}
*/

// Render the array as an HTML list.
document.write("<h1>Listado de nombres</h1>");
document.write("<ul>");
for (var nombre of nombres) {
    document.write("<li>" + nombre + "</li>");
}
document.write("</ul>");
<file_sep>/Javascript/aprendiendo-jquery/js/04-Efectos.js
$(document).ready(function(){
$('#caja').hide();
$('#ocultar').hide();
$("#mostrar").show();
$("#mostrar").click(function(){
$('#caja').show('normal');
$(this).hide();
$("#ocultar").show();
});
$("#ocultar").click(function(){
$('#caja').hide('normal');
$(this).hide();
$("#mostrar").show();
});
});<file_sep>/Javascript/Hola-mundo/js/12-Ejercicio2.js
'use strict'

// Using a loop, add up the numbers entered by the user until a negative
// number is entered, then show the sum and the average.

var numero = 0;
var contador = 0;
var suma = 0;

while (numero >= 0) {
    numero = parseInt(prompt("Introduce un numero"));
    if (numero < 0) {
        // A negative number ends the input phase.
        break;
    }
    contador++;
    suma = suma + numero;
}

// Bug fix: if the very first number is negative, contador is 0 and the
// average used to be NaN (0/0); report 0 instead of dividing by zero.
var media = contador > 0 ? suma / contador : 0;
alert("La suma de todos los numeros es " + suma + " y la media es " + media);<file_sep>/Javascript/Hola-mundo/js/28-DOM.js
'use strict'
// DOM - Document Object Model
function cambiaColor(color){
caja.style.background = color;
}
//conseguir elementos con un ID concreto
var caja=document.getElementById("micaja");
console.log(caja);
caja.innerHTML = "He modificado el archivo";
caja.style.background = "red";
caja.style.padding = "20px";
caja.style.color = "white";
//conseguir elementos por su etiqueta
var todosLosDivs = document.getElementsByTagName("div");
todosLosDivs[2].style.background="green";
todosLosDivs[1].style.background="yellow";
console.log(todosLosDivs);
//conseguir elementos por su clase
<file_sep>/Javascript/Hola-mundo/js/13-Ejercicio3.js
'use strict'

/*
  Read two numbers and print every number strictly between them.
*/

var numero1 = 0;
var numero2 = 0;

// Re-prompt until the input parses as a number.
do {
    numero1 = parseInt(prompt("Introduce el primer número"));
} while (isNaN(numero1))

do {
    numero2 = parseInt(prompt("Introduce el segundo número"));
} while (isNaN(numero2))

if (numero1 == numero2) {
    alert("Los numeros introducidos son iguales");
} else {
    // Math.min/Math.max replaces the two duplicated if/else branches.
    var min = Math.min(numero1, numero2);
    var max = Math.max(numero1, numero2);

    console.log("Los numeros comprendidos entre " + min + " y " + max + " son:");
    for (var i = min + 1; i < max; i++) {
        console.log(i);
    }
}
<file_sep>/Javascript/Hola-mundo/js/29-BOM.js
'use strict'

// Browser Object Model

console.log(window.innerHeight);   // viewport height
console.log(window.innerWidth);    // viewport width
console.log(screen.width);         // physical screen width
console.log(screen.height);        // physical screen height
console.log(window.location.href); // URL of the current page

// Opens `url` in a new window/tab; presumably called from an HTML
// onclick handler — verify in the page markup.
function abrirVentana(url){
    window.open(url);
}
<file_sep>/Javascript/Hola-mundo/js/17-Ejercicio7.js
'use strict'
/*
Tabla de multiplicar de un numero introducido por pantalla
*/
var numero = parseInt(prompt("Introduce un numero"));
document.write("<h2> La tabla de multiplicar de " + numero + " es:</h2>");
for(var i=1;i<=10;i++){
document.write("<h5>"+numero+" x "+i+" = "+(numero * i)+"</h5>");
//document.write("<br/>");
}<file_sep>/Javascript/Hola-mundo/js/18-Ejercicio8.js
'use strict'

/*
  Calculator:
  1. Ask for two numbers.
  2. Re-prompt while the input is not a number.
  3. Show the sum, difference, product and quotient in the body,
     in alerts and in the console.
*/

// Helper extracted from the two duplicated prompt/validate loops:
// keeps asking until the input parses as a number.
function pedirNumero(mensaje) {
    var valor;
    do {
        valor = parseInt(prompt(mensaje));
        if (isNaN(valor)) {
            alert("Dato no válido");
        }
    } while (isNaN(valor))
    return valor;
}

var numero1 = pedirNumero("Introduce el primer número");
var numero2 = pedirNumero("Introduce el segundo número");

// Show in the document body
document.write("<h2>Las operaciones con los numeros " + numero1 + " y " + numero2 + " son:</h2>");
document.write("<h3>Suma = " + (numero1 + numero2) + "</h3>");
document.write("<h3>Resta = " + (numero1 - numero2) + "</h3>");
document.write("<h3>Multiplicacion = " + (numero1 * numero2) + "</h3>");
document.write("<h3>División = " + (numero1 / numero2) + "</h3>");

// Show in the console
console.log("Las operaciones con los numeros " + numero1 + " y " + numero2 + " son:");
console.log("Suma = " + (numero1 + numero2));
console.log("Resta = " + (numero1 - numero2));
console.log("Multiplicacion = " + (numero1 * numero2));
console.log("División = " + (numero1 / numero2));

// Show as alerts (the division alert follows on the next line of the file)
alert("Las operaciones con los numeros " + numero1 + " y " + numero2 + " son:");
alert("Suma = " + (numero1 + numero2) );
alert("Resta = " + (numero1 - numero2) );
alert("Multiplicacion = " + (numero1 * numero2) );
alert("División = " + (numero1 / numero2) );<file_sep>/Javascript/Hola-mundo/js/06-condicionales.js
'use strict'

// Conditional example: classify a person as an adult or a minor.

var edad = 12;
var nombre = "<NAME>"

// Ternary form of the original if/else; the printed text is identical.
var tramo = (edad >= 18) ? "mayor" : "menor";
console.log(nombre + " tiene " + edad + " años, es " + tramo + " de edad.");
<file_sep>/Javascript/aprendiendo-jquery/js/03-textos.js
$(document).ready(function(){
reload_link();
boton = $("#add_button");
nuevo_link = $("#add_link");
boton.click(function(){
$("#menu").append('<li><a href="'+$("#add_link").val()+'"></a></li>'); //Añadimos el nuevo link introducido en el input
reload_link();
});
function reload_link(){
$('a').each(function(index){ //Recorremos todos los elementos 'a' del DOM
var enlace = $(this).attr("href"); //capturamos el enlace contenido en href
$(this).text(enlace); //Lo añadimos al DOM para poder visualizarlo en la web
});
}
});<file_sep>/Javascript/Hola-mundo/js/02-variables.js
'use strict'

// VARIABLES
// `var` declarations are function-scoped; `let` (block-scoped) shown too.
var pais = "España";
var continente = "Europa";
var antiguedad = 2019;
let prueba = "hola";

console.log(pais, continente, antiguedad);
<file_sep>/Javascript/Hola-mundo/js/20-Parametros2.js
'use strict'

// REST and SPREAD parameters

// REST example: `restoDeFrutas` collects every argument after the first two.
function listadoFrutas(fruta1, fruta2, ...restoDeFrutas){
    console.log("Fruta 1: " + fruta1);
    console.log("Fruta 2: " + fruta2);
    console.log(restoDeFrutas);    // the remaining arguments as an array
    console.log(...restoDeFrutas); // the remaining arguments spread out
}

listadoFrutas("Naranja", "Manzana", "Sandia", "Coco");

// SPREAD example: expand the `frutas` array into individual arguments.
var frutas = ["Fresa","Piña"];
listadoFrutas(...frutas, "Pera", "Naranja", "Aguacate");
$(document).ready(function(){
    // jQuery event examples: mouse, hover, click, focus/blur, move.

    // MouseOver and MouseOut (kept commented out; hover below replaces them)
    var caja = $('#caja');
    /*
    caja.mouseover(function(){
        $(this).css("background", "red");
    });
    caja.mouseout(function(){
        $(this).css("background", "green");
    });
    */

    // Hover: red on enter, green on leave.
    function cambiaRojo(){
        $(this).css("background", "red");
    }
    function cambiaVerde(){
        $(this).css("background", "green");
    }
    caja.hover(cambiaRojo, cambiaVerde);

    // Click and double click
    caja.click(function(){
        $(this).css("background", "blue")
        .css("color","white");
    });
    caja.dblclick(function(){
        $(this).css("background","pink")
        .css("color","yellow");
    });

    // Focus and Blur: highlight the field, then echo its value into #datos.
    $("#nombre").focus(function(){
        $(this).css("border","2px solid green");
    });
    $("#nombre").blur(function(){
        $(this).css("border","1px solid grey");
        $("#datos").text($(this).val()).show();
    });

    // Mousedown and mouseup
    var datos = $("#datos");
    // NOTE(review): `nombre` is assigned here but never used below.
    var nombre = $("#nombre");
    datos.mousedown(function(){
        $(this).css("border-color","grey");
    });
    datos.mouseup(function(){
        $(this).css("border-color","black");
    });

    // Mousemove: make #sigueme follow the cursor.
    $(document).mousemove(function(){
        $("#sigueme").css("left",event.clientX);
        $("#sigueme").css("top",event.clientY);
    });
});<file_sep>/Javascript/Hola-mundo/js/30-Eventos.js
'use strict'

// --- Mouse events on the button ---------------------------------------------

var boton = document.querySelector("#boton");

// Toggle the button background between green and red on every click.
function cambiarColor(){
	if(boton.style.background == "green"){
		boton.style.background = "red";
		console.log("green");
	}else{
		boton.style.background = "green";
		console.log("red");
	}
}

boton.addEventListener('click', function(){
	cambiarColor();
});

boton.addEventListener('mouseover', function(){
	boton.style.background = "#ccc";
});

boton.addEventListener('mouseout', function(){
	boton.style.background = "yellow";
});

// --- Focus / keyboard events on the name input -------------------------------

var input = document.querySelector("#campo_nombre");

input.addEventListener('focus', function(){
	console.log("[Focus] Estas dentro del input");
});

input.addEventListener('blur', function(){
	console.log("[Blur] Estas fuera del input");
});

input.addEventListener('keydown', function(){
	console.log("[keydown] Pulsando esta tecla", String.fromCharCode(event.keyCode));
});

input.addEventListener('keypress', function(){
	console.log("[keypress] Tecla presionada", String.fromCharCode(event.keyCode));
});

input.addEventListener('keyup', function(){
	console.log("[keyup] Tecla soltada", String.fromCharCode(event.keyCode));
});
'use strict'

// Two parallel arrays combined into a two-dimensional array.
const categorias = ['Accion','Terror','Cpmedia'];
const peliculas = ['La verdad duele','La vida es bella','Gran Torino'];
const cine = [categorias, peliculas];

console.log(cine);
console.log(cine[0][1]);
console.log(cine[1][2]);

// Mutating operations.
peliculas.push("Batman"); // append a new element
console.log(peliculas);
peliculas.sort();         // alphabetical order
peliculas.reverse();      // reversed order
peliculas.pop();          // drop the last element

// Find an element's index and remove it.
const indice = peliculas.indexOf('La vida es bella');
console.log(indice);
if(indice > -1){
	peliculas.splice(indice, 1); // remove one element at that position
}
console.log(peliculas);

// Array <-> string conversions.
const peliculasString = peliculas.join(); // comma-separated string
console.log(peliculasString);

const cadena = "texto1, texto2, texto3";
const cadenaArray = cadena.split(", "); // split on the given separator
console.log(cadenaArray);

// Searches.
const busqueda = peliculas.find(peli => peli == 'Gran Torino');
console.log(busqueda);

const precios = [10,12,80,47];
const busquedaPrecios = precios.some(pr => pr >= 20); // any price >= 20?
console.log(busquedaPrecios);
'use strict'

// Functions ------------------------------------------------------------------

// No parameters.
function HolaMundo(){
	console.log("H<NAME>");
}
HolaMundo();

// Two parameters, returns their sum.
function suma(numero1, numero2){
	return numero1 + numero2;
}
console.log(suma(2,3));

// Optional third parameter: true (default) -> add, false -> subtract.
function operacion(numero1, numero2, sel=true){
	return (sel == true) ? (numero1 + numero2) : (numero1 - numero2);
}
console.log(operacion(2,4));       // uses the default (addition)
console.log(operacion(2,4,true));  // explicit addition
console.log(operacion(2,4,false)); // subtraction
$(document).ready(function(){
	console.log("Estamos listos");

	// ID selectors.
	$("#rojo").css({ background: "red", color: "white" });
	$("#amarillo").css({ background: "yellow", color: "green" });
	$("#verde").css({ background: "green", color: "white" });

	// Class selector.
	$(".zebra").css("border", "5px dashed black");

	// Tag selector: clicking any paragraph tags it with the zebra class.
	$('p').click(function(){
		$(this).addClass('zebra');
		console.log("Cliqueado!!");
	});

	// Attribute selectors.
	$('[title="Google"]').css('background','#ccc');
	$('[title="Meneame"]').css('background','orange');

	// Descendant search inside #caja.
	var busqueda = $("#caja").find('.resaltado');
	console.log(busqueda);
});
$(document).ready(function(){

	var enIndex = window.location.href.indexOf('index') > -1;

	if(enIndex){
		// Home slider.
		$('.bxslider').bxSlider({
			mode: 'fade',
			captions: true,
			slideWidth: 1200
		});

		// Demo posts: five entries sharing the same date and body text.
		var fecha = "Publicado el "+ moment().date()+ " de " + moment().format("MMMM") + " de " + moment().format("YYYY");
		var contenido = 'In aliquet molestie suscipit. Nam nec est elementum, tristique nisi at, malesuada orci. Vestibulum faucibus vulputate tortor, venenatis hendrerit quam cursus sit amet. Sed magna magna, dignissim eget efficitur eu, bibendum id mauris. Sed vel vulputate purus. Fusce tempus maximus risus euismod semper. Fusce a urna fringilla, accumsan dolor at, posuere nisl. Nulla sit amet tortor vitae dui euismod condimentum in non nunc. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed aliquet dui leo, eget varius erat malesuada posuere. Mauris pulvinar lobortis ex, non pulvinar libero porttitor sed. Nullam sagittis, tortor sit amet sodales fringilla, ipsum libero auctor justo, quis laoreet nibh ipsum a diam. Suspendisse lacus nunc, mattis in diam ut, semper ultricies urna. Donec euismod, metus ac efficitur pellentesque, justo neque ornare eros, vel tempor ex lorem nec tortor. Pellentesque mollis placerat sapien. Suspendisse in risus et dui ultrices posuere.';

		for(var i = 1; i <= 5; i++){
			$("#posts").append(`
			<article class="post">
				<h2>Prueba de título ${i}</h2>
				<span class="date">${fecha}</span>
				<p>${contenido}</p>
				<a href="#" class="button_more">Leer más</a>
			</article>
			`);
		}
	}

	// Theme switcher: swap the stylesheet referenced by #theme.
	var theme = $("#theme");
	$("#to_green").click(function(){
		theme.attr("href", "css/green.css");
	});
	$("#to_red").click(function(){
		theme.attr("href", "css/red.css");
	});
	$("#to_blue").click(function(){
		theme.attr("href", "css/blue.css");
	});

	// Smooth-scroll back to the top of the page.
	$(".subir").click(function(){
		$("html,body").animate({
			scrollTop: 0
		}, 500);
		return false;
	});

	// Fake login backed by localStorage.
	$("#login form").submit(function(){
		localStorage.setItem("form_name", $("#form_name").val());
	});

	var form_name = localStorage.getItem("form_name");
	if(form_name != null && form_name != "undefined"){
		var about_parrafo = $("#about p");
		about_parrafo.html("Bienvenido, "+form_name);
		about_parrafo.append("<a href='#' id='logout'> Cerrar sesión</a>" );
		$("#login").hide();
	}

	$("#logout").click(function(){
		localStorage.clear(); // wipe the stored name
		location.reload();    // and reload the page
	});

	// Accordion widget, only on the about page.
	if(window.location.href.indexOf('about') > -1){
		$("#acordeon").accordion();
	}
});
'use strict'

// while: the condition is checked before each iteration.
var year = 2018;
while(year <= 2051){
	console.log(`Estamos en el año: ${year}`);
	year += 1;
}

// do...while: the body runs once before the first check.
do{
	console.log(`Estamos en el año: ${year}`);
	year += 1;
}while(year <= 2100)
|
eb2e4a6d31fe1029797d668ec83e72a759055cb4
|
[
"JavaScript"
] | 20 |
JavaScript
|
castorfaener/JSCourse
|
b2aab1559ed0a6da2d0909719d7f219d1bbf2c58
|
661af311df13a56523bb727873f01868bca447ea
|
refs/heads/master
|
<repo_name>glaucosydow/pos-webdev<file_sep>/README.md
# Expressive - RestBeer
## Instalando o Expressive
```
curl -s http://getcomposer.org/installer | php
php composer.phar require zendframework/zend-expressive
php composer.phar require zendframework/zend-expressive-fastroute
php composer.phar require zendframework/zend-servicemanager
```
## Instalando o Twig
```
php composer.phar require zendframework/zend-expressive-twigrenderer
```
<file_sep>/src/RestBeer/Session.php
<?php
namespace RestBeer;
use Dflydev\FigCookies\SetCookie;
use Lcobucci\JWT\Parser;
use Lcobucci\JWT\Signer\Hmac\Sha256;
use PSR7Session\Http\SessionMiddleware;
use PSR7Session\Time\SystemCurrentTime;
/**
 * Factory for the application's PSR-7 session middleware.
 */
class Session
{
    /**
     * Build a configured SessionMiddleware instance.
     *
     * NOTE(review): the '<KEY>' placeholders below are redacted secrets
     * (JWT signature / verification keys) — the surrounding string syntax
     * was mangled by the redaction and must be restored before this runs.
     *
     * @return SessionMiddleware
     */
    public function get()
    {
        $sessionMiddleware = new SessionMiddleware(
            new Sha256(),
            '<KEY> // signature key (important: change this to your own)
            '<KEY> // verification key (important: change this to your own)
            SetCookie::create('an-example-cookie-name')
                ->withSecure(false) // false on purpose, unless you have https locally
                ->withHttpOnly(true)
                ->withPath('/'),
            new Parser(),
            1200, // 20 minutes
            new SystemCurrentTime()
        );
        return $sessionMiddleware;
    }
}
|
5dc36897c78499de59b4237c6e1033a70c966dac
|
[
"Markdown",
"PHP"
] | 2 |
Markdown
|
glaucosydow/pos-webdev
|
eb940b2cb0c09db2d35947a53bf9ec8d8f3263f5
|
97dc959a77a299f2deefb15fe0abb2894304af22
|
refs/heads/master
|
<repo_name>keishinkickback/DiReAL<file_sep>/inception_score.py
from __future__ import print_function
import argparse
import torch
from torch import nn
from torch.nn import functional as F
import torch.utils.data
from torchvision.models.inception import inception_v3
import torchvision.transforms as transforms
import numpy as np
from scipy.stats import entropy
from models import Generator
def inception_score(imgs, cuda=True, batch_size=32, resize=True, splits=1):
    """Computes the inception score of the generated images imgs

    imgs -- Torch dataset of (3xHxW) numpy images normalized in the range [-1, 1]
    cuda -- whether or not to run on GPU
    batch_size -- batch size for feeding into Inception v3
    resize -- bilinearly upsample each batch to 299x299 (Inception v3's input size)
    splits -- number of chunks over which the score is averaged

    Returns (mean, std) of the per-split scores, where each split's score is
    exp(mean KL(p(y|x) || p(y))).
    """
    N = len(imgs)

    assert batch_size > 0
    assert N > batch_size

    # Set up dtype: picking a CUDA tensor type moves both model and batches to GPU.
    if cuda:
        dtype = torch.cuda.FloatTensor
    else:
        if torch.cuda.is_available():
            print("WARNING: You have a CUDA device, so you should probably set cuda=True")
        dtype = torch.FloatTensor

    # Set up dataloader (no shuffling, so preds[i] matches imgs[i])
    dataloader = torch.utils.data.DataLoader(imgs, batch_size=batch_size)

    # Load inception model, pretrained on ImageNet, in eval mode.
    inception_model = inception_v3(pretrained=True, transform_input=False).type(dtype)
    inception_model.eval()
    up = nn.Upsample(size=(299, 299), mode='bilinear',align_corners=False).type(dtype)
    def get_pred(x):
        # Returns the (batch, 1000) class-probability matrix for a batch.
        if resize:
            x = up(x)
        x = inception_model(x)
        return F.softmax(x,dim=1).data.cpu().numpy()

    # Get predictions for the whole dataset.
    preds = np.zeros((N, 1000))

    for i, batch in enumerate(dataloader, 0):
        batch = batch.type(dtype)
        batch_size_i = batch.size()[0]

        preds[i*batch_size:i*batch_size + batch_size_i] = get_pred(batch)

    # Now compute the mean kl-div per split.
    split_scores = []

    for k in range(splits):
        part = preds[k * (N // splits): (k+1) * (N // splits), :]
        py = np.mean(part, axis=0)  # marginal class distribution p(y)
        scores = []
        for i in range(part.shape[0]):
            pyx = part[i, :]
            scores.append(entropy(pyx, py))  # KL(p(y|x) || p(y))
        split_scores.append(np.exp(np.mean(scores)))

    return np.mean(split_scores), np.std(split_scores)
def test(opt):
    """Sanity-check the scorer on CIFAR-10.

    Downloads CIFAR-10 into the current directory and prints the inception
    score computed over ``opt.n_images`` images.

    opt -- parsed CLI options; uses opt.n_images, opt.cuda and opt.batchSize.
    """

    class IgnoreLabelDataset(torch.utils.data.Dataset):
        """Wraps a labelled dataset, yielding only images, truncated to `size`."""

        def __init__(self, orig, size=None):
            self.orig = orig
            if size is None:
                self.size = len(orig)
            else:
                # int() guards against sizes arriving as strings from argparse.
                self.size = int(size)

        def __getitem__(self, index):
            return self.orig[index][0]

        def __len__(self):
            # Bug fix: this used to return a hard-coded 1024, silently ignoring
            # the requested size. Honour it, capped at the wrapped dataset's length.
            return min(self.size, len(self.orig))

    import torchvision.datasets as dset
    import torchvision.transforms as transforms

    cifar = dset.CIFAR10(root='.', download=True,
                             transform=transforms.Compose([
                                 # transforms.Scale was removed from torchvision;
                                 # Resize is its replacement.
                                 transforms.Resize(32),
                                 transforms.ToTensor(),
                                 transforms.Normalize([0.485, 0.456, 0.406],[0.229, 0.224, 0.225])
                             ])
    )

    print ("Calculating Inception Score...")
    print (inception_score(IgnoreLabelDataset(cifar, opt.n_images), cuda=opt.cuda, batch_size=opt.batchSize, resize=True, splits=1))
class gan_Dataset(torch.utils.data.Dataset):
    """Dataset view over a generator: every __getitem__ draws fresh noise and
    returns one generated, ImageNet-normalised image.

    NOTE(review): the index is ignored, so samples are NOT reproducible across
    accesses — each indexing yields a brand-new image.
    """

    def __init__(self, model, nz=100, cuda=True, n_images=1024):
        # model -- generator mapping (1, nz, 1, 1) noise to an image batch
        # nz -- latent vector size
        # cuda -- run generation on the GPU
        # n_images -- length reported by __len__
        self.model = model.eval()
        self.nz = nz
        self.noise = torch.FloatTensor(1, self.nz, 1, 1)
        self.n_images = n_images
        self.cuda = cuda
        if self.cuda:
            self.model.cuda()
            self.noise = self.noise.cuda()
        # ImageNet mean/std normalisation, applied after rescaling to [0, 1].
        self.transforms=transforms.Compose([
                                 transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
                             ])

    def __getitem__(self, index):
        # Draw new noise in-place and generate a single sample.
        self.noise.resize_(1, self.nz, 1, 1).normal_(0, 1)
        fake = self.model(self.noise)
        fake = fake.cpu().data.squeeze()
        fake = (fake + 1.0) / 2.0  # map tanh output [-1, 1] -> [0, 1]
        return self.transforms(fake)

    def __len__(self):
        return self.n_images
def calc_inception_score_from_state_dict(opt):
    """Restore a Generator from the checkpoint at ``opt.netG`` and print its
    inception score (10 splits)."""
    generator = Generator(opt.nz)
    generator.load_state_dict(torch.load(opt.netG))
    print(generator)

    samples = gan_Dataset(generator, nz=opt.nz, cuda=opt.cuda, n_images=opt.n_images)
    print("Calculating Inception Score...")
    print(
        inception_score(samples,
                        cuda=opt.cuda, batch_size=opt.batchSize, resize=True, splits=10))
def calc_inception_score(netG, nz, cuda=True, n_images=1024, batchSize=32):
    """Print the inception score of generator ``netG``.

    netG -- generator module mapping (1, nz, 1, 1) noise to images
    nz -- latent vector size
    cuda -- run both generation and Inception v3 on the GPU
    n_images -- number of samples to score
    batchSize -- batch size fed to Inception v3
    """
    print("Calculating Inception Score...")
    # Bug fix: ``cuda`` was not forwarded to inception_score, so scoring always
    # ran with inception_score's default (cuda=True) even when cuda=False.
    print(
        inception_score(gan_Dataset(netG, nz=nz, cuda=cuda, n_images=n_images),
                        cuda=cuda, batch_size=batchSize, resize=True, splits=10))
def get_args():
    """Parse command-line options for inception-score evaluation and print them."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--test', action='store_true', help='test inception score of cifar10')
    parser.add_argument('--batchSize', type=int, default=16, help='input batch size')
    parser.add_argument('--imageSize', type=int, default=32, help='the height / width of the input image to network')
    parser.add_argument('--nz', type=int, default=128, help='size of the latent z vector')
    # NOTE(review): action='store_true' combined with default=True makes this
    # flag a no-op (it can never be turned off from the CLI) — confirm intended.
    parser.add_argument('--cuda', action='store_true', help='enables cuda', default=True)
    parser.add_argument('--netG', default='', help="path to netG (to continue training)")
    # Bug fix: without type=int a value passed on the command line stays a
    # string, which breaks the dataset-length arithmetic downstream.
    parser.add_argument('--n_images', type=int, default=4096, help="number of generating images")
    opt = parser.parse_args()
    print(opt)
    return opt
if __name__ == '__main__':
    # CLI entry point: --test scores CIFAR-10, otherwise a checkpoint is scored.
    options = get_args()
    if options.test:
        test(options)
    else:
        calc_inception_score_from_state_dict(options)
<file_sep>/util.py
import torch
import torch.nn.parallel
import torch.utils.data
# custom weights initialization called on netG and netD
def weights_init(m):
    """DCGAN-style layer initialisation, meant for ``model.apply(weights_init)``.

    Conv* layers get N(0, 0.02) weights; BatchNorm* layers get N(1, 0.02)
    weights and zero bias. All other modules are left untouched.
    """
    name = m.__class__.__name__
    if 'Conv' in name:
        m.weight.data.normal_(0.0, 0.02)
    elif 'BatchNorm' in name:
        m.weight.data.normal_(1.0, 0.02)
        m.bias.data.fill_(0)
def direal_loss(model, threshold=0.6):
    """DiReAL diversity regulariser: penalises pairs of convolution filters
    whose cosine similarity exceeds ``threshold`` in absolute value.

    model -- module whose 4-D conv weights are inspected; layers with exactly
             3 input channels (the RGB stem) are skipped.
    threshold -- |cos| value above which a filter pair contributes to the loss
    Returns 0.0 when no layer qualifies, otherwise a scalar tensor.
    """
    def pairwise_cosine(w):
        # w: (filters, fan_in) -> (filters, filters) cosine-similarity matrix.
        gram = torch.mm(w, w.t())
        norms = (w * w).sum(dim=1, keepdim=True) ** .5
        return gram / norms / norms.t()

    penalty = 0.0
    for param in model.parameters():
        shape = param.size()
        if len(shape) != 4 or shape[1] == 3:
            continue  # only conv weights, and never the RGB input layer
        sims = pairwise_cosine(param.view(shape[0], -1))
        # Zero the diagonal, then keep only entries beyond the threshold.
        off_diag = sims - torch.eye(sims.size(0)).to(sims.device)
        off_diag = off_diag * (threshold < off_diag.abs()).float()
        penalty = penalty + torch.sum(off_diag ** 2)
    return penalty
<file_sep>/main_pld.py
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim.lr_scheduler import ExponentialLR
from torchvision import datasets, transforms
from torch.autograd import Variable
import models_resnet
import models
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import os
from util import direal_loss
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=64)
parser.add_argument('--lr', type=float, default=2e-4)
parser.add_argument('--loss', type=str, default='bce')
parser.add_argument('--divreg', action='store_true', default=True)
parser.add_argument('--checkpoint_dir', type=str, default='checkpoints_dcgan_divreg')
parser.add_argument('--model', type=str, default='resnet_not')
parser.add_argument('--use_clamp', action='store_true', default=False)
parser.add_argument('--clamp_upper', type=float, default=1.0)
parser.add_argument('--clamp_lower', type=float, default=-1.0)
args = parser.parse_args()
print(args)
loader = torch.utils.data.DataLoader(
datasets.CIFAR10('../data/', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])),
batch_size=args.batch_size, shuffle=True, num_workers=1, pin_memory=True)
Z_dim = 128
#number of updates to discriminator for every update to generator
disc_iters = 5
# discriminator = torch.nn.DataParallel(Discriminator()).cuda() # TODO: try out multi-gpu training
if args.model == 'resnet':
discriminator = models_resnet.Discriminator().cuda()
generator = models_resnet.Generator(Z_dim).cuda()
else:
discriminator = models.Discriminator_SN().cuda()
generator = models.Generator(Z_dim).cuda()
# because the spectral normalization module creates parameters that don't require gradients (u and v), we don't want to
# optimize these using sgd. We only let the optimizer operate on parameters that _do_ require gradients
# TODO: replace Parameters with buffers, which aren't returned from .parameters() method.
optim_disc = optim.Adam(filter(lambda p: p.requires_grad, discriminator.parameters()), lr=args.lr, betas=(0.0,0.9))
optim_gen = optim.Adam(generator.parameters(), lr=args.lr, betas=(0.0,0.9))
# use an exponentially decaying learning rate
scheduler_d = optim.lr_scheduler.ExponentialLR(optim_disc, gamma=0.99)
scheduler_g = optim.lr_scheduler.ExponentialLR(optim_gen, gamma=0.99)
def train(epoch):
for batch_idx, (data, target) in enumerate(loader):
if data.size()[0] != args.batch_size:
continue
data, target = Variable(data.cuda()), Variable(target.cuda())
# update discriminator
# weight clipping
if args.use_clamp:
# clamp parameters to a cube
for p in discriminator.parameters():
p.data.clamp_(args.clamp_lower, args.clamp_upper)
for _ in range(disc_iters):
z = Variable(torch.randn(args.batch_size, Z_dim).cuda())
optim_disc.zero_grad()
optim_gen.zero_grad()
if args.loss == 'hinge':
disc_loss = nn.ReLU()(1.0 - discriminator(data)).mean() + nn.ReLU()(1.0 + discriminator(generator(z))).mean()
elif args.loss == 'wasserstein':
disc_loss = -discriminator(data).mean() + discriminator(generator(z)).mean()
else:
disc_loss = nn.BCEWithLogitsLoss()(discriminator(data), Variable(torch.ones(args.batch_size, 1).cuda())) + \
nn.BCEWithLogitsLoss()(discriminator(generator(z)), Variable(torch.zeros(args.batch_size, 1).cuda()))
if args.divreg:
disc_loss += direal_loss(discriminator, 0.6).cuda()
disc_loss.backward()
optim_disc.step()
z = Variable(torch.randn(args.batch_size, Z_dim).cuda())
# update generator
optim_disc.zero_grad()
optim_gen.zero_grad()
if args.loss == 'hinge' or args.loss == 'wasserstein':
gen_loss = -discriminator(generator(z)).mean()
else:
gen_loss = nn.BCEWithLogitsLoss()(discriminator(generator(z)), Variable(torch.ones(args.batch_size, 1).cuda()))
gen_loss.backward()
optim_gen.step()
if batch_idx % 100 == 0:
print('disc loss', disc_loss.data[0], 'gen loss', gen_loss.data[0])
scheduler_d.step()
scheduler_g.step()
fixed_z = Variable(torch.randn(args.batch_size, Z_dim).cuda())
def evaluate(epoch):
samples = generator(fixed_z).cpu().data.numpy()[:64]
fig = plt.figure(figsize=(8, 8))
gs = gridspec.GridSpec(8, 8)
gs.update(wspace=0.05, hspace=0.05)
for i, sample in enumerate(samples):
ax = plt.subplot(gs[i])
plt.axis('off')
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_aspect('equal')
plt.imshow(sample.transpose((1,2,0)) * 0.5 + 0.5)
if not os.path.exists('out/'):
os.makedirs('out/')
plt.savefig('out/{}.png'.format(str(epoch).zfill(3)), bbox_inches='tight')
plt.close(fig)
if not os.path.exists(args.checkpoint_dir):
os.makedirs(args.checkpoint_dir)
for epoch in range(2000):
train(epoch)
evaluate(epoch)
torch.save(discriminator.state_dict(), os.path.join(args.checkpoint_dir, 'disc_{}'.format(epoch)))
torch.save(generator.state_dict(), os.path.join(args.checkpoint_dir, 'gen_{}'.format(epoch)))
<file_sep>/models.py
# DCGAN-like generator and discriminator
from torch import nn
from torch.nn.modules import normalization as Norm
from torch.nn.utils.weight_norm import weight_norm
from spectral_normalization import SpectralNorm
class Generator(nn.Module):
    """DCGAN-style generator: latent z of shape (B, z_dim) -> image of shape
    (B, nc, 32, 32), values in [-1, 1] via the final tanh."""

    def __init__(self, z_dim, nc=3):
        super(Generator, self).__init__()
        self.z_dim = z_dim
        # Layers kept in the exact original order so ``self.model`` state_dict
        # keys (Sequential indices) stay checkpoint-compatible.
        layers = [
            nn.ConvTranspose2d(z_dim, 512, 4, stride=1),                # 1x1 -> 4x4
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.ConvTranspose2d(512, 256, 4, stride=2, padding=(1,1)),   # 4 -> 8
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.ConvTranspose2d(256, 128, 4, stride=2, padding=(1,1)),   # 8 -> 16
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.ConvTranspose2d(128, 64, 4, stride=2, padding=(1,1)),    # 16 -> 32
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.ConvTranspose2d(64, nc, 3, stride=1, padding=(1,1)),
            nn.Tanh(),
        ]
        self.model = nn.Sequential(*layers)

    def forward(self, z):
        # Reshape the flat latent vector into a (B, z_dim, 1, 1) "image".
        latent = z.view(-1, self.z_dim, 1, 1)
        return self.model(latent)
class Discriminator_plain(nn.Module):
    """Unnormalised DCGAN discriminator. Also serves as the base class for the
    normalised variants below, which only swap out ``self.main``."""

    def __init__(self, nc=3, ndf=64):
        super(Discriminator_plain, self).__init__()
        # Sequential composition kept identical to preserve state_dict keys.
        self.main = nn.Sequential(
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 8, 1, 4, 1, 1, bias=False)
        )

    def forward(self, input):
        # Flatten the spatial map of logits into shape (N*, 1).
        return self.main(input).view(-1, 1)
class Discriminator_BN(Discriminator_plain):
    """Discriminator_plain with batch normalisation after the middle convs;
    forward() is inherited unchanged."""

    def __init__(self, nc=3, ndf=64):
        super(Discriminator_BN, self).__init__()
        # Replace the parent's feature extractor wholesale.
        layers = [
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 8, 1, 4, 1, 1, bias=False),
        ]
        self.main = nn.Sequential(*layers)
class Discriminator_LN(Discriminator_plain):
    """Discriminator_plain with layer normalisation; the fixed LayerNorm shapes
    imply 32x32 inputs. forward() is inherited unchanged."""

    def __init__(self, nc=3, ndf=64):
        super(Discriminator_LN, self).__init__()
        # Replace the parent's feature extractor wholesale.
        layers = [
            # input is (nc) x 32 x 32 given the LayerNorm shapes below
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            Norm.LayerNorm((ndf * 2, 8, 8)),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            Norm.LayerNorm((ndf * 4, 4, 4)),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            Norm.LayerNorm((ndf * 8, 2, 2)),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(ndf * 8, 1, 4, 1, 1, bias=False),
        ]
        self.main = nn.Sequential(*layers)
class Discriminator_WN(Discriminator_plain):
    """Discriminator_plain with weight normalisation on every conv;
    forward() is inherited unchanged."""

    def __init__(self, nc=3, ndf=64):
        super(Discriminator_WN, self).__init__()
        # Replace the parent's feature extractor wholesale.
        layers = [
            weight_norm(nn.Conv2d(nc, ndf, 4, 2, 1, bias=False)),
            nn.LeakyReLU(0.2, inplace=True),
            weight_norm(nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False)),
            nn.LeakyReLU(0.2, inplace=True),
            weight_norm(nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False)),
            nn.LeakyReLU(0.2, inplace=True),
            weight_norm(nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False)),
            nn.LeakyReLU(0.2, inplace=True),
            weight_norm(nn.Conv2d(ndf * 8, 1, 4, 1, 1, bias=False)),
        ]
        self.main = nn.Sequential(*layers)
class Discriminator_SN(nn.Module):
    """Spectrally-normalised discriminator producing one logit; the 4*4*512
    flatten in forward() implies 32x32 inputs."""

    def __init__(self, nc=3, leak = 0.1, w_g = 4):
        # nc -- input channels; leak -- LeakyReLU negative slope;
        # w_g -- final feature-map side length (4 for 32x32 inputs)
        super(Discriminator_SN, self).__init__()
        self.w_g = w_g
        self.main = nn.Sequential(
            SpectralNorm(nn.Conv2d(nc, 64, 3, stride=1, padding=(1,1))),
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(64, 64, 4, stride=2, padding=(1,1))),    # 32 -> 16
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(64, 128, 3, stride=1, padding=(1,1))),
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(128, 128, 4, stride=2, padding=(1,1))),  # 16 -> 8
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(128, 256, 3, stride=1, padding=(1,1))),
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(256, 256, 4, stride=2, padding=(1,1))),  # 8 -> 4
            nn.LeakyReLU(leak, inplace=True),
            SpectralNorm(nn.Conv2d(256, 512, 3, stride=1, padding=(1,1))),
            nn.LeakyReLU(leak, inplace=True)
        )

        # Linear head over the flattened 4x4x512 feature map.
        self.fc = SpectralNorm(nn.Linear(4 * 4* 512, 1))

    def forward(self, x):
        m = self.main(x)
        return self.fc(m.view(-1, 4 * 4 * 512))
if __name__ == '__main__':
    # Smoke test: push one generated batch through every discriminator variant.
    import torch

    z = torch.rand(128)
    gen = Generator(128)
    fake = gen(z)
    for disc_cls in (Discriminator_plain, Discriminator_BN, Discriminator_LN,
                     Discriminator_WN, Discriminator_SN):
        score = disc_cls()(fake)
        print(disc_cls.__name__, 'z', z.size(), 'x', fake.size(), 'y', score.size())
<file_sep>/models_resnet.py
# ResNet generator and discriminator
from torch import nn
from spectral_normalization import SpectralNorm
import numpy as np
class ResBlockGenerator(nn.Module):
    """Generator residual block: BN-ReLU-(2x upsample)-conv-BN-ReLU-conv, with
    an upsampling identity bypass when stride != 1. Note the main path always
    upsamples, so the block is intended to be used with stride=2."""

    def __init__(self, in_channels, out_channels, stride=1):
        super(ResBlockGenerator, self).__init__()

        self.conv1 = nn.Conv2d(in_channels, out_channels, 3, 1, padding=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, padding=1)
        # Xavier init with gain 1, in the same order as parameter creation.
        for conv in (self.conv1, self.conv2):
            nn.init.xavier_uniform_(conv.weight.data, 1.)

        self.model = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(),
            nn.Upsample(scale_factor=2),
            self.conv1,
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            self.conv2
            )
        # Bypass upsamples to match the main path when the block changes resolution.
        self.bypass = nn.Upsample(scale_factor=2) if stride != 1 else nn.Sequential()

    def forward(self, x):
        return self.model(x) + self.bypass(x)
class ResBlockDiscriminator(nn.Module):
    """Discriminator residual block with spectral norm; when stride != 1 both
    paths downsample by 2 via average pooling (the bypass adds a spectrally
    normalised 1x1 projection)."""

    def __init__(self, in_channels, out_channels, stride=1):
        super(ResBlockDiscriminator, self).__init__()

        self.conv1 = nn.Conv2d(in_channels, out_channels, 3, 1, padding=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, padding=1)
        nn.init.xavier_uniform_(self.conv1.weight.data, 1.)
        nn.init.xavier_uniform_(self.conv2.weight.data, 1.)

        if stride == 1:
            # Resolution-preserving variant: no pooling, identity bypass.
            self.model = nn.Sequential(
                nn.ReLU(),
                SpectralNorm(self.conv1),
                nn.ReLU(),
                SpectralNorm(self.conv2)
                )
        else:
            self.model = nn.Sequential(
                nn.ReLU(),
                SpectralNorm(self.conv1),
                nn.ReLU(),
                SpectralNorm(self.conv2),
                nn.AvgPool2d(2, stride=stride, padding=0)
                )
        self.bypass = nn.Sequential()
        if stride != 1:
            self.bypass_conv = nn.Conv2d(in_channels,out_channels, 1, 1, padding=0)
            # sqrt(2) gain on the projection, matching the first-block bypass below.
            nn.init.xavier_uniform_(self.bypass_conv.weight.data, np.sqrt(2))

            self.bypass = nn.Sequential(
                SpectralNorm(self.bypass_conv),
                nn.AvgPool2d(2, stride=stride, padding=0)
            )

    def forward(self, x):
        return self.model(x) + self.bypass(x)
# special ResBlock just for the first layer of the discriminator:
# no leading ReLU (the input is an image), and the bypass pools BEFORE its
# 1x1 projection instead of after.
class FirstResBlockDiscriminator(nn.Module):

    def __init__(self, in_channels, out_channels, stride=1):
        super(FirstResBlockDiscriminator, self).__init__()

        self.conv1 = nn.Conv2d(in_channels, out_channels, 3, 1, padding=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, 3, 1, padding=1)
        self.bypass_conv = nn.Conv2d(in_channels, out_channels, 1, 1, padding=0)
        nn.init.xavier_uniform_(self.conv1.weight.data, 1.)
        nn.init.xavier_uniform_(self.conv2.weight.data, 1.)
        nn.init.xavier_uniform_(self.bypass_conv.weight.data, np.sqrt(2))

        # Main path always halves the resolution.
        # NOTE(review): ``stride`` is accepted but unused here — both pools are
        # fixed AvgPool2d(2); confirm callers only pass stride=2.
        self.model = nn.Sequential(
            SpectralNorm(self.conv1),
            nn.ReLU(),
            SpectralNorm(self.conv2),
            nn.AvgPool2d(2)
            )
        self.bypass = nn.Sequential(
            nn.AvgPool2d(2),
            SpectralNorm(self.bypass_conv),
        )

    def forward(self, x):
        return self.model(x) + self.bypass(x)
class Generator(nn.Module):
    """ResNet generator: latent z -> dense 4x4 feature map -> three 2x
    upsampling residual blocks -> (B, nc, 32, 32) tanh image."""

    def __init__(self, z_dim, nc=3, ngf=128):
        # z_dim -- latent size; nc -- output channels; ngf -- feature width
        super(Generator, self).__init__()
        self.ngf=ngf
        self.dense = nn.Linear(z_dim, 4 * 4 * ngf)
        self.final = nn.Conv2d(ngf, nc, 3, stride=1, padding=1)
        nn.init.xavier_uniform_(self.dense.weight.data, 1.)
        nn.init.xavier_uniform_(self.final.weight.data, 1.)

        self.model = nn.Sequential(
            ResBlockGenerator(ngf, ngf, stride=2),   # 4 -> 8
            ResBlockGenerator(ngf, ngf, stride=2),   # 8 -> 16
            ResBlockGenerator(ngf, ngf, stride=2),   # 16 -> 32
            nn.BatchNorm2d(ngf),
            nn.ReLU(),
            self.final,
            nn.Tanh())

    def forward(self, z):
        # Project z to a (B, ngf, 4, 4) map, then run the upsampling stack.
        return self.model(self.dense(z).view(-1, self.ngf, 4, 4))
class Discriminator(nn.Module):
    """ResNet discriminator producing one unnormalized real/fake score per image."""

    def __init__(self, nc=3, ndf=128):
        super(Discriminator, self).__init__()
        self.ndf = ndf

        # Two downsampling blocks followed by two stride-1 blocks, then a
        # global 8x8 average pool collapses the spatial dimensions.
        self.model = nn.Sequential(
            FirstResBlockDiscriminator(nc, ndf, stride=2),
            ResBlockDiscriminator(ndf, ndf, stride=2),
            ResBlockDiscriminator(ndf, ndf),
            ResBlockDiscriminator(ndf, ndf),
            nn.ReLU(),
            nn.AvgPool2d(8),
        )

        # Spectrally normalized linear head; initialized before wrapping.
        self.fc = nn.Linear(ndf, 1)
        nn.init.xavier_uniform_(self.fc.weight.data, 1.)
        self.fc = SpectralNorm(self.fc)

    def forward(self, x):
        """Return the score for each image in the batch (shape: batch x 1)."""
        features = self.model(x).view(-1, self.ndf)
        return self.fc(features)
# Smoke test: push a single latent vector through the full
# generator -> discriminator pipeline and print the tensor shapes.
if __name__ == '__main__':
    import torch
    z = torch.rand(128)  # 1-D latent; view(-1, ngf, 4, 4) makes it a batch of 1
    g = Generator(128)
    x = g(z)
    d = Discriminator()
    y = d(x)
    print( 'z', z.size(), 'x', x.size(), 'y', y.size())<file_sep>/main.py
import argparse
import os
import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import datasets, transforms
import torchvision.utils as vutils
import models
import models_resnet
from util import direal_loss
def train(epoch):
    """Run one training epoch over the module-level ``loader``.

    For each batch the discriminator is updated ``args.n_dupdates`` times and
    the generator once.  Every 100 batches, losses are printed and real/fake
    sample grids are written to ``args.checkpoint_dir``.  Relies on module
    globals: discriminator, generator, optimizers, schedulers, args, fixed_z.
    """
    for batch_idx, (data, target) in enumerate(loader):
        # Skip the smaller trailing batch so z and data batch sizes always match.
        if data.size()[0] != args.batch_size:
            continue
        data, target = data.to(device=args.device), target.to(device=args.device)

        # weight clipping (WGAN-style Lipschitz constraint)
        if args.use_clamp:
            for p in discriminator.parameters():
                p.data.clamp_(args.clamp_lower, args.clamp_upper)

        # --- discriminator updates ---
        for _ in range(args.n_dupdates):
            z = torch.randn(args.batch_size, args.nz).to(device=args.device)
            optim_disc.zero_grad()
            optim_gen.zero_grad()
            if args.loss == 'hinge':
                disc_loss = nn.ReLU()(1.0 - discriminator(data)).mean() + nn.ReLU()(1.0 + discriminator(generator(z))).mean()
            elif args.loss == 'wasserstein':
                disc_loss = -discriminator(data).mean() + discriminator(generator(z)).mean()
            else:
                disc_loss = nn.BCEWithLogitsLoss()(discriminator(data), torch.ones(args.batch_size, 1).to(device=args.device))
                disc_loss += nn.BCEWithLogitsLoss()(discriminator(generator(z)), torch.zeros(args.batch_size, 1).to(device=args.device))
            if args.divreg:
                disc_loss += direal_loss(discriminator, 0.6).to(device=args.device)
            disc_loss.backward()
            optim_disc.step()

        # --- generator update ---
        # BUG FIX: was z.cuda(), which crashed on CPU-only runs; honor args.device.
        z = torch.randn(args.batch_size, args.nz).to(device=args.device)
        optim_disc.zero_grad()
        optim_gen.zero_grad()
        if args.loss == 'hinge' or args.loss == 'wasserstein':
            gen_loss = -discriminator(generator(z)).mean()
        else:
            gen_loss = nn.BCEWithLogitsLoss()(discriminator(generator(z)), torch.ones(args.batch_size, 1).to(device=args.device))
        gen_loss.backward()
        optim_gen.step()

        if batch_idx % 100 == 0:
            # BUG FIX: tensor.data[0] is long removed from PyTorch; use .item().
            print('epoch:', epoch, 'batch:', batch_idx, 'disc loss', disc_loss.item(), 'gen loss', gen_loss.item())
            # BUG FIX: real_cpu / opt.outf / netG / fixed_noise were undefined
            # names (copied from another script); use this script's globals.
            vutils.save_image(data, '%s/real_samples.png' % args.checkpoint_dir, normalize=True)
            fake = generator(fixed_z)
            vutils.save_image(fake.detach(),
                              '%s/fake_samples_epoch_%03d.png' % (args.checkpoint_dir, epoch),
                              normalize=True)
    scheduler_d.step()
    scheduler_g.step()
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', type=int, default=64)
    parser.add_argument('--lr', type=float, default=2e-4)
    parser.add_argument('--loss', type=str, default='bce')
    parser.add_argument('--direal', action='store_true', default=True)
    # BUG FIX: train() reads args.divreg, but only --direal was ever defined,
    # so every run crashed with AttributeError.  Default mirrors --direal.
    parser.add_argument('--divreg', action='store_true', default=True,
                        help='add the DiReAL diversity regularizer to the discriminator loss')
    parser.add_argument('--checkpoint_dir', type=str, default='checkpoints')
    parser.add_argument('--discriminator', type=str, default='resnet_not', help='plain, bn, ln, or wn')
    parser.add_argument('--generator', type=str, default='resnet', help='plain, resnet')
    parser.add_argument('--nz', type=int, default=128, help='size of the latent z vector')
    parser.add_argument('--use_clamp', action='store_true', default=False)
    parser.add_argument('--clamp_upper', type=float, default=1.0)
    parser.add_argument('--clamp_lower', type=float, default=-1.0)
    parser.add_argument('--cuda', action='store_true', default=False)
    parser.add_argument('--n_dupdates', default=5, type=int, help='number of updates to discriminator for every update to generator')
    args = parser.parse_args()
    args.device = torch.device("cuda:0" if args.cuda and torch.cuda.is_available() else 'cpu')
    print(args)

    # CIFAR-10 scaled to [-1, 1] to match the generator's tanh output range.
    dataset_cifar10 = datasets.CIFAR10('../data/', train=True, download=True,
                                       transform=transforms.Compose([transforms.ToTensor(),
                                                                     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
                                       )
    loader = torch.utils.data.DataLoader(dataset_cifar10, batch_size=args.batch_size, shuffle=True, num_workers=1,
                                         pin_memory=True)

    # discriminator
    # BUG FIX: pre-seed with None so an unknown name fails the asserts below
    # with AssertionError instead of an unrelated NameError.
    discriminator = None
    generator = None
    if args.discriminator.lower() == 'plain':
        discriminator = models.Discriminator_plain()
    elif args.discriminator.lower() == 'bn':
        discriminator = models.Discriminator_BN()
    elif args.discriminator.lower() == 'ln':
        discriminator = models.Discriminator_LN()
    elif args.discriminator.lower() == 'wn':
        discriminator = models.Discriminator_WN()
    elif args.discriminator.lower() == 'sn':
        discriminator = models.Discriminator_SN()
    elif args.discriminator.lower() == 'resnet':
        discriminator = models_resnet.Discriminator()
    # generator
    if args.generator.lower() == 'resnet':
        generator = models_resnet.Generator(args.nz)
    elif args.generator.lower() == 'plain':
        generator = models.Generator(args.nz)
    assert discriminator
    assert generator
    discriminator = discriminator.to(device=args.device)
    generator = generator.to(device=args.device)
    print(discriminator)
    print(generator)

    # Zero-momentum Adam plus slow exponential LR decay.
    optim_disc = optim.Adam(filter(lambda p: p.requires_grad, discriminator.parameters()), lr=args.lr, betas=(0.0, 0.9))
    optim_gen = optim.Adam(generator.parameters(), lr=args.lr, betas=(0.0, 0.9))
    scheduler_d = optim.lr_scheduler.ExponentialLR(optim_disc, gamma=0.99)
    scheduler_g = optim.lr_scheduler.ExponentialLR(optim_gen, gamma=0.99)

    if not os.path.exists(args.checkpoint_dir):
        os.makedirs(args.checkpoint_dir)
    # BUG FIX: was .cuda(), which crashed on CPU-only runs; honor args.device.
    fixed_z = torch.randn(args.batch_size, args.nz).to(device=args.device)
    for epoch in range(200):
        train(epoch)
        torch.save(discriminator.state_dict(), os.path.join(args.checkpoint_dir, 'disc_{}'.format(epoch)))
        torch.save(generator.state_dict(), os.path.join(args.checkpoint_dir, 'gen_{}'.format(epoch)))
<file_sep>/README.md
# DiReAL
[Diversity Regularized Adversarial Learning](https://arxiv.org/abs/1901.10824)
|
62ebddfc19721e056e1fd178813a6cfcad30f9fe
|
[
"Markdown",
"Python"
] | 7 |
Python
|
keishinkickback/DiReAL
|
91e3db231943aaca4aed0d8067f504a25f02323f
|
4d089c92459eef07461b0b87ad22de7951703591
|
refs/heads/master
|
<repo_name>srikailash/miniproject<file_sep>/src/main.py
import os
import json
import libvirt
from json import JSONDecoder
from json import JSONEncoder
from flask import Flask , request , jsonify
from random import randint
# Flask application plus module-level mutable state (mirrored in flat files).
app = Flask(__name__)
VM = {}  # vmid -> {'name', 'instance_type', 'pmid'}; persisted in "vmidinfo"
allocated = 0
pmid_allocated = 0
# Static catalogue of VM flavours: cpu cores, ram (MB), disk (GB).
vmtypes = {"types":[{"tid":1,"cpu":1,"ram":512,"disk":1},{"tid":2,"cpu":2,"ram":1024,"disk":2},{"tid":3,"cpu":4,"ram": 2048,"disk": 3}]}
# Pre-encoded copy of the same catalogue (not referenced elsewhere in this file).
jsonString = JSONEncoder().encode({"types":[{"tid":1,"cpu":1,"ram":512,"disk":1},{"tid":2,"cpu":2,"ram":1024,"disk":2},{"tid":3,"cpu":4,"ram": 2048,"disk": 3}]})
@app.route('/')
def hello_world():
    # Simple liveness-check endpoint.
    greeting = 'Hello World!'
    return greeting
@app.route('/vm/create' , methods = ['GET'])
def create():
    """Create a VM: pick a PM, scp-less define a libvirt domain, register it.

    Query params: name (VM name), instance_type (index into vmtypes),
    id (image id from "image_file").  Returns {"vmid": n} on success or
    "0" / {"status": 0} on failure.
    """
    vm_name = request.args.get('name')
    instance_type = 0
    instance_type = int(request.args.get('instance_type'))
    image_id = int(request.args.get('id'))
    # NOTE(review): instance_type is used as a 0-based list index, while the
    # catalogue's "tid" fields are 1-based — confirm which callers send.
    cpu_required = vmtypes['types'][instance_type]['cpu']
    ram_required = vmtypes['types'][instance_type]['ram']
    dsk_required = vmtypes['types'][instance_type]['disk']
    pmid = int(allocatePm(int(cpu_required) , int(ram_required) , int(dsk_required)))
    if pmid == 0:
        return jsonify(status = int(0))
    pm_name = ""
    ##############################FETCHING PM_NAME FROM PMID#################
    fileopen = open("pm_file" , "r")
    fileinfo = fileopen.readlines()
    for i in fileinfo:
        j = i.split()
        if int(j[0]) == int(pmid):
            pm_name = str(j[1])
            break
    ##########################################################################
    conn = libvirt.open("qemu+ssh://"+pm_name+"/system")
    ###########HERE UID HAS TO BE CHANGED##################################
    # uid = line count of "vmidinfo" + 1; not unique once VMs are destroyed
    # (destroy() removes lines) — TODO confirm before relying on uniqueness.
    fileopen = open("vmidinfo" , "r")
    uid = int(1)
    fileinfo = fileopen.readlines()
    uid = int(len(fileinfo))
    uid = uid+1
    fileopen.close()
    ##################CHECKING VALIDITY OF IMAGE_ID#########################
    found = 0
    imageFullPath = ""
    fileopen = open("image_file" , "r")
    imageInfo = fileopen.readlines()
    for i in imageInfo:
        j = i.split()
        print j
        if int(image_id) == int(j[0]):
            imageFullPath = str(j[1])
            found = 1
            break
    if found == 0:
        return str(0)
    ########################################################################
    print "PRINTING UID ",
    print uid
    # Build the ISO path on the PM: /home/<pm user>/<basename of image>.
    temp = imageFullPath.split('/')
    isofilename = str(temp[len(temp)-1])
    #isofilename = "~/"+isofilename
    isofilename = str(isofilename)
    temp = pm_name.split("@")
    isofilename = str("/home/")+str(temp[0])+str("/")+str(isofilename)
    isofilename = str(isofilename)
    print isofilename
    #imageFullPath = "/home/anilreddy/Downloads/ubuntu-12.04-desktop-amd64.iso"
    #command = "scp " + str(imageFullPath) + " " + str(pm_name) +":"+ isofilename
    #print command
    #os.system(command)
    isofilename = str(isofilename)
    # NOTE(review): memory is hard-coded to 1 KiB and vcpu to 1; the requested
    # ram/cpu from vmtypes are never written into the domain XML — confirm.
    xml = """<domain type='qemu' id='""" + str(uid) +"""'>
    <name>""" + vm_name +"""</name>
    <memory unit='KiB'>"""+str(1)+"""</memory>
    <vcpu placement='static'>1</vcpu>
    <os>
    <type arch='x86_64' machine='pc-0.11'>hvm</type>
    </os>
    <devices>
    <disk type='file' device='cdrom'>
    <source file='"""+str(isofilename)+"""'/>
    <target dev='hdc' bus='ide'/>
    </disk>
    </devices>
    </domain>
    """
    try:
        print "helo"
        conn.defineXML(xml)
        dom = conn.lookupByName(vm_name)
    except:
        return str(0)
    try:
        # Register in memory and append to the "vmidinfo" flat file.
        VM[str(uid)] = {}
        VM[str(uid)]['name'] = str(vm_name)
        VM[str(uid)]['instance_type'] = instance_type
        VM[str(uid)]['pmid'] = int(pmid)
        try :
            fileopen = open("vmidinfo","a")
            toFile = str(uid) + "\t\t" + vm_name + "\t\t" + str(instance_type) + "\t\t" + str(pmid)+"\n"
            fileopen.write(toFile)
            fileopen.close()
        except:
            print "unable to write to file"
        dom.create()
        return jsonify(vmid = uid)
    except:
        return str(0)
    return str(uid)
@app.route('/vm/query' , methods=['GET'])
def query():
    """Look up a VM by vmid in "vmidinfo".

    Returns JSON {vmid, name, instance_type, pmid} or the string "0" if the
    vmid is unknown.
    """
    vmid = request.args.get('vmid')
    vmid = str(vmid)
    # BUG FIX: mode was 'ra', which is not a valid open() mode; the file is
    # only read here, and it is now closed deterministically.
    fileopen = open("vmidinfo", "r")
    try:
        for vmscreated in fileopen:
            vminfo = vmscreated.split()
            if vminfo[0] == vmid:
                return jsonify(vmid = vmid , name = vminfo[1] , instance_type = vminfo[2] , pmid = vminfo[3])
    finally:
        fileopen.close()
    return str(0)
@app.route('/vm/destroy' , methods=['GET'])
def destroy():
    """Destroy the VM with the given vmid and drop its line from "vmidinfo".

    Returns {"status": 1} on success, {"status": 0} on libvirt failure, or
    the string "0" when the vmid is unknown.
    """
    vmid = request.args.get('vmid')
    vmid = str(vmid)
    pmid = 0
    found = 0
    ###########GETTING PMID FROM VMID##################
    fileopen = open("vmidinfo" , "r")
    fileinfo = fileopen.readlines()
    for i in fileinfo:
        j = i.split()
        if int(j[0]) == int(vmid):
            found = 1
            pmid = int(j[3])
            break
    fileopen.close()
    ###################################################
    if int(found) == 0:
        return str(0)
    pm_name = ""
    ########GETTING PMNAME FROM PMID###################
    fileopen = open("pm_file" , "r")
    fileinfo = fileopen.readlines()
    for i in fileinfo:
        j = i.split()
        if int(pmid) == int(j[0]):
            pm_name = str(j[1])
            break
    fileopen.close()
    ###################################################
    conn = libvirt.open("qemu+ssh://"+str(pm_name)+"/system")
    # Rewrite "vmidinfo" in place, keeping every line except the target VM's.
    fileopen = open("vmidinfo" , "r+")
    fileinfo = fileopen.readlines()
    #print fileinfo
    fileopen.seek(0)
    try:
        vm_name = str('a')
        for i in fileinfo:
            j = i.split()
            print j
            if j[0] == vmid:
                print "****************"
                vm_name = j[1]
            else:
                fileopen.write(i)
        print vm_name
        dom = conn.lookupByName(vm_name)
        dom.destroy()
        conn.close()
        # Truncate after rewriting so leftover bytes from the old, longer
        # file content are removed.
        fileopen.truncate()
        fileopen.close()
        return jsonify(status = 1)
    except:
        # NOTE(review): on failure the registry file is left partially
        # rewritten and never truncated/closed — confirm intended behaviour.
        conn.close()
        return jsonify(status = 0)
############THIS IS DONE BUT WRITE A SCRIPT TO CREATE INFO FILE OR DO NOT USE THAT FILE
@app.route('/vm/types')
def types():
    """Return the static catalogue of supported VM flavours as JSON."""
    #vm_types_file = open("info" , "r")
    #val = json.loads(vm_types_file.read())
    #return jsonify(val)
    return jsonify(vmtypes)
#done
@app.route('/pm/list')
def list():
    # Return the ids of all registered physical machines, read from
    # "pm_file" (one "<pmid> <user@host>" entry per line).
    # NOTE: the function name shadows the builtin list() within this module.
    ids = []
    with open("pm_file", "r") as pm_file:
        for line in pm_file.readlines():
            fields = line.split()
            ids.append(int(fields[0]))
    return jsonify(pmids = ids)
#done
@app.route('/pm/listvms')
def getVmsfromid():
    """List the vmids hosted on the given pmid, or "0" if the pmid is unknown."""
    pmid = request.args.get('pmid')
    pmid = str(pmid)
    fileopen = open("vmidinfo" , "r")
    vms_present = []
    # Validate the pmid against the PM registry first.
    pms_file = open("pm_file" , "r")
    pms_info = pms_file.readlines()
    found = 0
    for i in pms_info:
        j = i.split()
        if int(pmid)==int(j[0]):
            found = 1
            break
    if found==0:
        return str(0)
    # Collect every VM whose 4th column (host pmid) matches.
    for i in fileopen:
        print i
        curr_vm = i.split()
        if curr_vm[3] == pmid:
            vms_present.append(int(curr_vm[0]))
    return jsonify(vmids = vms_present)
##WORKING FOR THIS PC , CHECK AFTER SSH
@app.route('/pm/query') #for now i'm just printing info of my laptop
def getPmInfo(): #tobedone: ssh to all pms , make a list r, jsonify and return
    """Report capacity/free cpu+ram and VM count for the given pmid."""
    pmid = request.args.get('pmid')
    pm_cap = {}
    pm_fre = {}
    #################################FETCHING IP FROM GIVEN PMID#####################
    pm_ip = str(1)
    pms_file = open("pm_file" , "r")
    pms_info = pms_file.readlines()
    found = 0
    for i in pms_info:
        j = i.split()
        if int(pmid)==int(j[0]):
            pm_ip = str(j[1])
            found = 1
            break
    if found==0:
        return str(0)
    #############################FETCHING IP DONE HERE###############################
    print pm_ip
    ##############################HERE THE CONNECTION IS TO BE CHANGED AND THAT'S IT
    conn = libvirt.open("qemu+ssh://"+str(pm_ip)+"/system")
    # NOTE(review): this reads the *controller's* /proc/meminfo, not the
    # remote PM's, so the ram figures describe the wrong host — confirm.
    fileopen = open("/proc/meminfo" , "r")
    meminfo = fileopen.readlines()
    for i in meminfo:
        j = i.split()
        if j[0]=="MemTotal:":
            pm_cap['ram'] = int(j[1])
        if j[0]=="MemFree:":
            pm_fre['ram'] = int(j[1])
    fileopen.close()
    pm_cpu = conn.getMaxVcpus(None)
    #pm_mem = conn.getMemoryStats(0 , 0)
    pm_cap['cpu'] = int(pm_cpu)
    #pm_cap['ram'] = int(pm_mem['total'])
    pm_fre['cpu'] = int(pm_cpu)
    #pm_fre['ram'] = int(pm_mem['free'])
    ########################disk taken li8
    # Count VMs on this PM; assume one cpu consumed per VM.
    fileopen = open("vmidinfo" , "r")
    vms_using= 0
    json_data = open('info').read()
    data = json.loads(json_data)
    for i in fileopen:
        j = i.split()
        if j[3] == pmid:
            vms_using = vms_using + 1
            pm_fre['cpu'] = pm_fre['cpu'] - 1 ##to be corrected
    return jsonify(pmid = int(pmid) , capacity = pm_cap , free = pm_fre , vms = int(vms_using))
#done
@app.route('/image/list')
def getImagesinfo():
    """List registered images as [{id, name}] from "image_file" (id + path per line)."""
    fileopen = open("image_file", "r")
    Images = []
    temp = []
    for imageInfo in fileopen:
        fullpath = imageInfo.split()
        print fullpath[0]
        iname = {}
        iname['id'] = int(fullpath[0])
        # Expose only the basename of the stored full path.
        temp = fullpath[1].split('/')
        iname['name'] = str(temp[len(temp)-1])
        Images.append(iname)
    return jsonify(images = Images)
def allocatePm(cpu , ram , disk):
    """Return the pmid of a PM that can fit the requested cpu/ram, or 0.

    NOTE(review): declares ``global pmallocated`` but the module defines
    ``pmid_allocated``; the later ``pmid_allocated = 1`` assignment is
    therefore local and lost — confirm which name was intended.
    NOTE(review): free ram is read from the controller's own /proc/meminfo,
    not the candidate PM's, and ``disk`` is never checked.
    """
    pmfileopen = open("pm_file" , "r")
    pmfileinfo = pmfileopen.readlines()
    pmusername = ""
    retvalue = 0
    ###print "*"
    global pmallocated
    global allocated
    #print "**"
    for i in pmfileinfo:
        #print "***"
        j = i.split()
        pmusername = j[1]
        retvalue = int(j[0])
        pmallocated = int(j[0])
        conn = libvirt.open("qemu+ssh://"+str(pmusername)+"/system")
        print pmusername
        pm_cpu = conn.getMaxVcpus(None)
        #pm_mem = conn.getMemoryStats(0 , 0)
        pm_mem = {}
        fileopen = open("/proc/meminfo" , "r")
        meminfo = fileopen.readlines()
        for x in meminfo:
            y = x.split()
            if y[0]=="MemFree:":
                pm_mem['ram'] = int(y[1])
                break
        fileopen.close()
        ########################check here wheather cpu is automatically subtracted or we have to do that
        if int(pm_mem['ram']) >= int(ram):
            if int(pm_cpu) >= int(cpu):
                pmid_allocated = 1 #has to be changed when for loop is used
                allocated = 1
                return int(j[0]) #for now 1 later iterator
        return 0 #0 means no PM found
    return 0
if __name__ == "__main__":
    """Startup: assign ids to any un-numbered pm_file/image_file lines, then serve."""
    #conn = libvirt.open("qemu:///system")
    ########################AS WE ARE ASKED TO ASSIGN UNIQUE PMIDS ########################
    # Pass 1: find the next free pmid; pass 2: prefix id-less lines with it.
    fileopen = open("pm_file" , "r+")
    fileinfo = fileopen.readlines()
    fileopen.seek(0)
    pmid = 2 #pmid=1 for the localhost
    for i in fileinfo:
        t = i.split()
        if len(t) == 2:
            if int(t[0]) > pmid:
                pmid = int(t[0])+1
    for i in fileinfo:
        t = i.split()
        if len(t)==1:
            pmid = pmid + 1
            fileopen.write(str(pmid)+"\t\t"+i)
        else:
            fileopen.write(i)
    fileopen.truncate()
    fileopen.close()
    ###########################ASSIGINING UNIQUE PMIDS DONE HERE############################
    #######AS WE ARE ASKED TO ASSIGN UNIQUE IDS TO IMAGES########################
    # Same two-pass numbering scheme for the image registry.
    fileopen = open("image_file" , "r+")
    fileinfo = fileopen.readlines()
    fileopen.seek(0)
    image_id = 1
    for i in fileinfo:
        t = i.split()
        if len(t)==2:
            if int(t[0]) > image_id:
                image_id = int(t[0]) + 1
    for i in fileinfo:
        t = i.split()
        if len(t)==1:
            fileopen.write(str(image_id)+"\t\t"+i)
            image_id = image_id + 1
        else:
            fileopen.write(i)
    fileopen.truncate()
    fileopen.close()
    ########################ASSIGNING UNIQUE IDS TO IMAGES DONE ###################
    app.run(debug = True)
<file_sep>/bin/test.py
import json

# Round-trip the API "info" file into use.json.
json_data = open('/home/srikailash/project/bin/info').read()
data = json.loads(json_data)
with open('/home/srikailash/project/bin/use.json' , 'w') as f:
    # BUG FIX: json.dumps(data, f) passed the file object as the `skipkeys`
    # argument and discarded the returned string, leaving use.json empty;
    # json.dump actually writes into the file.
    json.dump(data, f)
<file_sep>/src/test.py
# Append a single "kailash" line to the vmidinfo registry file.
with open("vmidinfo", "a") as f:
    f.write("kailash")
    f.write("\n")
|
0865f031737768573f55b8b1235461357637ed47
|
[
"Python"
] | 3 |
Python
|
srikailash/miniproject
|
82af338af0f8a74f2f571346b8e6677a10094f31
|
1f2466f001dbc705df2e61009e54dbcdff8d5da2
|
refs/heads/master
|
<repo_name>Carghaez/JSnake<file_sep>/assets/js/model/Model.js
/**
 * Game model: grid dimensions plus the snake and food sub-models.
 * Relies on globals: Position, SnakeModel, FoodsModel, DIR_* constants.
 */
function Model(_cols, _rows)
{
    this.cols = _cols;
    this.rows = _rows;
    this.snake = null;
    this.foods = null;

    // Random cell inside the grid.
    this.randomPos = function()
    {
        var posX = parseInt(Math.random() * this.cols);
        var posY = parseInt(Math.random() * this.rows);
        return new Position(posX, posY);
    }

    // Center cell of the grid (integer division).
    this.centerPos = function()
    {
        var posX = parseInt(this.cols/2);
        var posY = parseInt(this.rows/2);
        //console.log('Gridsize:', this.cols, this.rows);
        return new Position(posX, posY);
    }

    // Game over when the head occupies one of the body cells.
    this.checkGameOver = function()
    {
        return this.snake.checkBodyPos(this.snake.getPos());
    }

    // Advance pos one cell in the given direction, wrapping at the edges.
    this.makeMove = function(pos, direction)
    {
        switch (direction)
        {
            case DIR_LEFT:
                pos.x--;
                if(pos.x < 0) {
                    pos.x = (this.cols-1);
                }
                break;
            case DIR_RIGHT:
                pos.x++;
                if(pos.x > (this.cols-1)) {
                    pos.x = 0;
                }
                break;
            case DIR_UP:
                pos.y--;
                if(pos.y < 0) {
                    pos.y = (this.rows-1);
                }
                break;
            case DIR_DOWN:
                pos.y++;
                if(pos.y > (this.rows-1)) {
                    pos.y = 0;
                }
                break;
        }
        return pos;
    }

    // True (and the food is consumed) when the head sits on a food cell.
    this.hasEaten = function()
    {
        return this.foods.eat(this.snake.getPos());
    }

    // One simulation tick: move the snake, growing it if food was eaten.
    this.update = function()
    {
        this.snake.move(this.hasEaten());
    }

    this.changeSnakeDir = function(dir)
    {
        this.snake.changeDir(dir);
    }

    // A cell is empty when neither food nor any snake segment occupies it.
    this.isEmpty = function(_pos)
    {
        if(this.foods.checkPos(_pos))
            return false;
        if(this.snake.checkPos(_pos))
            return false;
        return true;
    }

    // Create the snake (centered, random direction) and the food container.
    this.init = function(_nick)
    {
        this.snake = new SnakeModel(_nick, this.centerPos());
        this.foods = new FoodsModel();
        this.snake.randomDir();
    }
}<file_sep>/assets/js/view/View.js
/**
 * DOM view: renders the board, snake, foods and score from G_model state.
 * Grid cell size in pixels is fixed at construction time.
 */
function View(_grid_width, _grid_height)
{
    this.$board;
    this.$snake;
    this.grid_width = _grid_width;
    this.grid_height = _grid_height;

    // Size the board, create the snake-head element, and do a first render.
    this.init = function()
    {
        this.$board = document.getElementById('board');
        this.$board.style.width = this.getWidth() + 'px';
        this.$board.style.height = this.getHeight() + 'px';
        this.$snake = document.createElement('div');
        this.$snake.id = 'snake';
        this.$snake.style.width = this.grid_width + 'px';
        this.$snake.style.height = this.grid_height + 'px';
        this.$board.appendChild(this.$snake);
        this.updateSnake();
    }

    this.getWidth = function()
    {
        return G_model.cols * this.grid_width;
    }

    this.getHeight = function()
    {
        return G_model.rows * this.grid_height;
    }

    // Full redraw for one game tick.
    this.update = function()
    {
        this.updateSnake();
        this.updateFoods();
        this.updateScore();
    }

    // Position/rotate the head element and sync one div per body segment.
    this.updateSnake = function()
    {
        var pos = G_model.snake.getPos();
        this.$snake.style.left = (pos.x * this.grid_width) + 'px';
        this.$snake.style.top = (pos.y * this.grid_height) + 'px';
        // Rotate the head sprite to face the current direction.
        switch (G_model.snake.getDir())
        {
            case DIR_UP:
                this.$snake.style.transform = 'rotate(180deg)';
                break;
            case DIR_RIGHT:
                this.$snake.style.transform = 'rotate(270deg)';
                break;
            case DIR_LEFT:
                this.$snake.style.transform = 'rotate(90deg)';
                break;
            case DIR_DOWN:
                this.$snake.style.transform = 'rotate(0deg)';
                break;
        }
        // Create/move a div for each live body segment; remove stale ones.
        for (var i = 0; i < G_model.snake.lastBodyId(); i++) {
            // NOTE(review): `body` lacks `var` and becomes an implicit global.
            body = G_model.snake.existsBody('body-'+i);
            if(body) {
                var tmp = document.getElementById(body.getId());
                if(!tmp) {
                    var temp = document.createElement('div');
                    temp.id = body.getId();
                    temp.classList.add('body');
                    temp.style.width = this.grid_width-2 + 'px';
                    temp.style.height = this.grid_height-2 + 'px';
                    temp.style.left = (body.getPos().x*this.grid_width) + 'px';
                    temp.style.top = (body.getPos().y*this.grid_height) + 'px';
                    this.$board.appendChild(temp);
                }else{
                    tmp.style.left = (body.getPos().x*this.grid_width) + 'px';
                    tmp.style.top = (body.getPos().y*this.grid_height) + 'px';
                }
            } else {
                var tmp = document.getElementById('body-'+i);
                if(tmp) {
                    tmp.parentNode.removeChild(tmp);
                }
            }
        }
    }

    // Create a div (with a random sprite class) per live food; remove eaten ones.
    this.updateFoods = function()
    {
        var food;
        for (var i = 0; i < G_model.foods.last_id; i++) {
            food = G_model.foods.exists('food-'+i);
            if(food) {
                if(!document.getElementById(food.id)) {
                    var temp = document.createElement('div');
                    temp.id = food.id;
                    temp.classList.add('food-'+this.randomFoodType());
                    temp.style.width = this.grid_width + 'px';
                    temp.style.height = this.grid_height + 'px';
                    temp.style.left = (food.pos.x*this.grid_width) + 'px';
                    temp.style.top = (food.pos.y*this.grid_height) + 'px';
                    this.$board.appendChild(temp);
                }
            } else {
                var tmp = document.getElementById('food-'+i);
                if(tmp) {
                    tmp.parentNode.removeChild(tmp);
                }
            }
        }
    }

    // Show speed (inverse of the tick interval), score, and game-over text.
    this.updateScore = function()
    {
        document.getElementById('score').textContent = 'Speed: '+ (1 /G_controller.getInterval()).toFixed(4)+'\nScore: '+ G_model.snake.length();
        if(G_controller.status == GAME_OVER) {
            document.getElementById('score').textContent += '\nGame Over!';
        }
    }

    this.randomFoodType = function()
    {
        return parseInt(Math.random()*FOOD_TIPES);
    }
}<file_sep>/assets/js/main.js
// Entry point: build and start the game controller once the DOM is ready.
function main() {
    var controller = new GameController();
    G_controller = controller;
    G_controller.init('Luca');
}

document.addEventListener('DOMContentLoaded', main);
<file_sep>/assets/js/model/SnakeModel.js
// Snake head: a single position that moves via the model's wrap-around rules.
SnakeHead = (function() {
    function SnakeHead(_pos)
    {
        var pos = _pos;

        // Advance one cell in _dir (delegates wrapping to G_model).
        this.move = function(_dir)
        {
            pos = G_model.makeMove(pos, _dir);
        }

        this.getPos = function()
        {
            return pos;
        }

        this.checkPos = function(_pos)
        {
            return pos.equal(_pos);
        }
    }
    return SnakeHead;
})();
// One body segment: each tick it copies the position of its successor
// (the segment closer to the head), producing the follow-the-leader motion.
SnakeBody = (function() {
    function SnakeBody(_bodySucc, _id)
    {
        // NOTE(review): Object.assign copies only own properties of the
        // Position — assumes equal() is an own property, not on the prototype.
        var pos = Object.assign({}, _bodySucc.getPos());
        var succ = _bodySucc;
        var id = 'body-'+_id;

        this.move = function()
        {
            pos = Object.assign({}, succ.getPos());
        }

        this.getPos = function()
        {
            return pos;
        }

        this.getId = function()
        {
            return id;
        }

        this.checkPos = function(_pos)
        {
            return pos.equal(_pos);
        }
    }
    return SnakeBody;
})();
// Snake: head + ordered body segments + a small queue of pending directions.
var SnakeModel = (function() {
    function SnakeModel(_nick, _pos)
    {
        var nick = _nick;
        var dir = [DIR_DOWN];   // queued directions; dir[0] is the active one
        var head = new SnakeHead(_pos);
        var body = [];
        var last_id = 0;        // monotonically increasing body-segment id

        this.getNickname = function()
        {
            return nick;
        };

        this.length = function()
        {
            return body.length;
        };

        // Called by the key-press event handler; queues a direction change.
        this.changeDir = function(_dir)
        {
            if(dir.last() === _dir) {
                return;
            }
            if(dir.length >= 3) {
                return;
            }
            // With a body, an instant 180-degree turn is not allowed.
            if(body.length > 0 && (
                (dir.last() === DIR_LEFT && _dir === DIR_RIGHT ) ||
                (dir.last() === DIR_RIGHT && _dir === DIR_LEFT ) ||
                (dir.last() === DIR_UP && _dir === DIR_DOWN ) ||
                (dir.last() === DIR_DOWN && _dir === DIR_UP ))) {
                return;
            }
            dir.push(_dir);
        }

        this.getDir = function()
        {
            return dir[0];
        }

        // Called during the game update when food has been eaten: appends a
        // new tail segment following the current last segment (or the head).
        this.createBody = function()
        {
            if(body.length > 0) {
                body.push(new SnakeBody(body.last(), last_id++));
            } else {
                body.push(new SnakeBody(head, last_id++));
            }
        }

        // Per-tick snake update: consume one queued direction, optionally
        // grow, shift segments tail-to-head, then move the head.
        this.move = function(hasEaten)
        {
            if(dir.length > 1) {
                dir.removeByIndex(0);
            }
            if(hasEaten) {
                this.createBody();
            }
            for (var i = body.length-1; i >= 0; --i) {
                body[i].move();
            }
            head.move(dir[0]);
        }

        this.getPos = function()
        {
            // return the head position
            return head.getPos();
        }

        // True when _pos matches the head or any body segment.
        this.checkPos = function(_pos)
        {
            var trovato = false;
            if(head.checkPos(_pos)) {
                trovato = true;
            }
            if(trovato)
                return trovato;
            return this.checkBodyPos(_pos);
        }

        // True when _pos matches any body segment (used for self-collision).
        this.checkBodyPos = function(_pos)
        {
            var trovato = false;
            var i = 0;
            while(!trovato && i < body.length) {
                if(body[i].checkPos(_pos)) {
                    trovato = true;
                }else{
                    i++;
                }
            }
            return trovato;
        }

        this.lastBodyId = function()
        {
            return last_id;
        }

        // Return the segment with the given DOM id, or false when absent.
        this.existsBody = function(_id)
        {
            var i = 0;
            var trovato = false;
            while (!trovato && i < body.length) {
                if (body[i].getId() === _id) {
                    trovato = body[i];
                }else{
                    i++;
                }
            }
            return trovato;
        }

        // Pick one of the four directions at random (initial spawn).
        this.randomDir = function()
        {
            switch (parseInt(Math.random()*4)) {
                case 0:
                    this.changeDir(DIR_LEFT);
                    break;
                case 1:
                    this.changeDir(DIR_DOWN);
                    break;
                case 2:
                    this.changeDir(DIR_RIGHT);
                    break;
                case 3:
                    this.changeDir(DIR_UP);
                    break;
            }
        }
    }
    return SnakeModel;
})();
<file_sep>/assets/js/model/FoodsModel.js
// A single food item: DOM element id plus its grid position.
function FoodModel(_id, _pos) {
    this.id = _id;
    this.pos = _pos;
}
// Container for the food items currently on the board (at most MAX_FOODS).
function FoodsModel()
{
    this.last_id = 0;          // monotonically increasing id for DOM elements
    this.food = [];
    this.max_foods = MAX_FOODS;

    // Spawn one food on a random empty cell, if below the cap.
    this.make = function()
    {
        if(this.food.length < MAX_FOODS) {
            var pos;
            do {
                pos = G_model.randomPos();
            } while(!G_model.isEmpty(pos));
            this.food.push(new FoodModel('food-'+this.last_id++, pos));
        }
    }

    // True when some food occupies _pos.
    this.checkPos = function(_pos)
    {
        var trovato = false;
        var i = 0;
        while(!trovato && i < this.food.length) {
            if(this.food[i].pos.equal(_pos)) {
                trovato = true;
            } else {
                i++;
            }
        }
        return trovato;
    }

    // Remove the food at _pos, if any; returns whether something was eaten.
    this.eat = function(_pos)
    {
        var mangiato = false;
        var i = 0;
        while(!mangiato && i < this.food.length) {
            if(this.food[i].pos.equal(_pos)) {
                this.food.splice(i, 1);
                mangiato = true;
            } else {
                i++;
            }
        }
        return mangiato;
    }

    // Return the food with the given id, or false when absent.
    this.exists = function(_id)
    {
        var i = 0;
        var trovato = false;
        while (!trovato && i < this.food.length) {
            if (this.food[i].id === _id) {
                trovato = this.food[i];
            } else {
                i++;
            }
        }
        return trovato;
    }
}
|
0f528bd0f0f0674c04e50588a30d0c56eed8d3be
|
[
"JavaScript"
] | 5 |
JavaScript
|
Carghaez/JSnake
|
bb38a4189eb20d76e855c3024cc0c820d5547cc8
|
df07ff5c766525ddeebbd824f49ffacc7b32adcf
|
refs/heads/master
|
<file_sep>import React, { Component } from 'react';
import styled from 'styled-components';
// Shared rounded button with default browser chrome stripped.
const Button = styled.button`
  font-family: sans-serif; /* BUG FIX: was "san-serif", an invalid font-family value */
  font-size:1.4rem;
  border:none;
  border-radius:5px;
`
class ComponentWithState extends Component {
constructor() {
super();
this.state = {
name: 'React'
};
}
render() {
return (
<div>
<Button>React</Button>
</div>
);
}
}
export default ComponentWithState;
|
5cf7f978eb69cfd0cf290d929ebe12b5865cdaff
|
[
"JavaScript"
] | 1 |
JavaScript
|
lklsquare/reactTemplate
|
f66787f97266ef2dfc4f6dbdcaca9d28ffa315ec
|
7adf9a60e1f809d3dbc54bdfc7ae0b84f0f459ed
|
refs/heads/master
|
<repo_name>Gabelbombe/aws-reservedinstances<file_sep>/run/app.php
<?php date_default_timezone_set('America/Los_Angeles');

// Front controller: supports both CLI ($argv) and web ($_GET) invocations.
error_reporting(-1);
ini_set('display_errors', 1);

require dirname(__DIR__) . '/vendor/autoload.php';

// Environment constants consumed by the bootstrap/config layers.
define('ENV_MODE', getenv('ENV_MODE') ? getenv('ENV_MODE') : 'Dev');
define('ENV_FILE', dirname(__DIR__) . '/config/' . (getenv('APP_ENV') ?: 'config') . '.json');

define('APP_PATH', dirname(__DIR__));
define('CONF_DIR', APP_PATH . '/config');

// NOTE(review): 'type' evaluates to true for web requests and 0 for CLI
// (Bootstrap checks `! $payload['type']`) — confirm this is the intent.
$payload =
[
    'type' => (! isset($argv) ?: 0),
    'args' => (! isset($argv) ? $_GET : $argv),
];

$bootstrap = New \Helpers\Bootstrap($payload);
$bootstrap->run();
<file_sep>/src/ServiceProvider/ConfigDriver.php
<?php

/**
 * Move to Interface
 */
Namespace ServiceProvider
{
    /**
     * Contract for configuration-file loaders (one driver per file format).
     */
    Interface ConfigDriver
    {
        /**
         * Parse the given file and return its configuration data.
         *
         * @param $filename
         * @return mixed
         */
        function load($filename);

        /**
         * Report whether this driver can handle the given file.
         *
         * @param $filename
         * @return mixed
         */
        function supports($filename);
    }
}<file_sep>/src/Helpers/Bootstrap.php
<?php

Namespace Helpers
{
    USE ServiceProvider\ConfigServiceProvider AS Config;
    USE ServiceMapper\AwsServiceMapper AS Mapper;

    /**
     * Application bootstrap: merges CLI/HTTP arguments with the JSON config
     * and drives the reserved-instance service mapping.
     */
    Class Bootstrap
    {
        /** @var array Merged argument map (request/CLI args + config). */
        private $args = [];

        /**
         * Bootstrap constructor.
         *
         * CLI invocations (type falsy) pass argv-style "key=value" pairs,
         * which are parsed into $_GET before merging with the config file.
         *
         * @param array $payload ['type' => invocation flag, 'args' => $_GET or $argv]
         */
        public function __construct(array $payload = [])
        {
            if (! $payload['type'] ?: 0) parse_str(implode("&", array_slice($payload['args'], 1)), $_GET);

            $config = New Config(ENV_FILE);
            $config->register([
                'ReservedInstances' => [
                    'Accounts' => []
            ]]);

            $this->setArguments($_GET, ['DryRun'] ); //filter if reqs
            $this->setArguments($config->getConfig(), ['Accounts']);
        }

        /**
         * Walk every configured account's service list and report the
         * AWS client class that will be used for it.
         */
        public function run()
        {
            $map = New Mapper('services.json'); //should be abstracted into a variable somewhere...

            if (isset($this->args ['Accounts']) && ! empty($this->args ['Use']))
            {
                foreach ($this->args ['Accounts'] AS $name => $number)
                {
                    foreach ($this->args ['Use'] [$name] AS $service)
                    {
                        echo "{$number}: " . ucfirst($service) . "Client\n";
                    }
                }
            }
        }

        /**
         * Merge $args into the argument map after validating required keys.
         * Existing keys win over incoming ones (array union semantics).
         *
         * @param array $args
         * @param array $reqs  Keys that must be present in $args
         * @return $this
         * @throws \RuntimeException     When $args is empty
         * @throws \OutOfBoundsException When a required key is missing
         */
        protected function setArguments(array $args = [], array $reqs)
        {
            if (empty($args)) Throw New \RuntimeException('Input cannot be empty, terminating...');

            foreach ($reqs AS $requirement) if (! isset($args[$requirement])) {
                Throw New \OutOfBoundsException("Arguments requires: $requirement");
            }

            $this->args = ($args + $this->args);

            return $this;
        }

        /**
         * Fetch a single merged argument, or null when absent.
         */
        private function get($string)
        {
            // BUG FIX: $this->args is an array, so the previous
            // property-style access ($this->args->$string) could never work.
            return isset($this->args[$string]) ? $this->args[$string] : null;
        }
    }
}<file_sep>/src/Amazon/Ec2.php
<?php

Namespace Amazon
{
    USE Aws\Ec2\Ec2Client AS Ec2Client;

    /**
     * Wrapper around the AWS SDK EC2 client, intended to expose the
     * reserved-instance describe* operations listed below.
     */
    Class Ec2 Extends Ec2Client
    {
        /** @var array Credential set for the target account (not yet used). */
        protected $credentials = [];

        // NOTE(review): this class extends Ec2Client but never calls the
        // parent constructor; instead it stores a second, unconfigured
        // client in an undeclared $client property — confirm whether
        // inheritance or composition is intended.
        public function __construct() {
            $this->client = New Ec2Client([

            ]);
        }

        // public describeReservedInstances ( array $args = array() )
        //
        // Executes the DescribeReservedInstances operation.
        // public describeReservedInstancesListings ( array $args = array() )
        //
        // Executes the DescribeReservedInstancesListings operation.
        // public describeReservedInstancesModifications ( array $args = array() )
        //
        // Executes the DescribeReservedInstancesModifications operation.
        // public describeReservedInstancesOfferings ( array $args = array() )
        //
        // Executes the DescribeReservedInstancesOfferings operation.
    }
}<file_sep>/src/ServiceMapper/AwsServiceMapper.php
<?php

Namespace ServiceMapper
{
    USE ServiceProvider\ConfigServiceProvider AS Config;

    /**
     * Maps service names to AWS client classes using a JSON config file
     * from the project's config/ directory.  Work in progress.
     */
    Class AwsServiceMapper
    {
        /** @var string Absolute path of the config/ directory (trailing slash). */
        protected $confDir = '';

        public function __construct($filename)
        {
            $this->confDir = dirname(dirname(__DIR__)) . '/config/';
//            if (! file_exists()) Throw New \Exception('Required config');

            $this->setSeviceFile($filename);

            // NOTE(review): debug leftover — this die; halts the whole
            // process after construction; remove once mapping is wired up.
            die;
        }

        // NOTE(review): method name has a typo ("Sevice"); private, so it can
        // be renamed safely together with its single call site above.
        private function setSeviceFile($filename)
        {
            if (file_exists($this->confDir . $filename)) {
                $config = New Config($this->confDir . $filename);
                print_r($config); exit;  // debug leftover: dumps config and exits
            }
        }
    }
}
|
ac69baa8c756acac2f06569f6dcbe941e3442a55
|
[
"PHP"
] | 5 |
PHP
|
Gabelbombe/aws-reservedinstances
|
f77c2a3dd65a24d4145894269315a4a5436338eb
|
ec7a360627b74b71cfa06d5b4bef699f7497285b
|
refs/heads/master
|
<file_sep>import requests
import json
import pandas as pd
# POST to API
# Ask the statworx COVID-19 endpoint for one country's case data.
payload = {'country': 'Germany'}
URL = 'https://api.statworx.com/covid'
response = requests.post(url=URL, data=json.dumps(payload))

# Convert to data frame
# Response body is a JSON-serialized table keyed by column name.
df = pd.DataFrame.from_dict(json.loads(response.text))
df.head()  # NOTE: value is discarded when run as a script; useful in a notebook
|
1ae66ff56089e96082f3cb0cb23190a24ac410b6
|
[
"Python"
] | 1 |
Python
|
iMarcello/covid-19-api
|
52931deceeb2cf9161acee40de029abca5db1d35
|
eb307fd708cacf1b43918c86e0df5665e2575609
|
refs/heads/master
|
<file_sep><?php

namespace App\Entity;

use App\Entity\Traits\CreatedAtTrait;
use App\Entity\Traits\UpdatedAtTrait;
use Doctrine\ORM\Mapping;

/**
 * Doctrine entity for a college record: name, location, contact details.
 * Created/updated timestamps come from the lifecycle-callback traits.
 *
 * @Mapping\Table(name="college")
 * @Mapping\Entity(repositoryClass="App\Repository\CollegeRepository")
 * @Mapping\HasLifecycleCallbacks()
 *
 */
class College
{
    use CreatedAtTrait, UpdatedAtTrait;

    /**
     * @var int
     *
     * @Mapping\Column(name="id", type="integer", unique=true)
     * @Mapping\Id
     * @Mapping\GeneratedValue(strategy="IDENTITY")
     */
    private int $id;

    /**
     * @var string
     *
     * @Mapping\Column(name="name", type="string", nullable=false)
     */
    private string $name;

    /**
     * @var string|null
     *
     * @Mapping\Column(name="img_url", type="string", length=2048, nullable=true)
     */
    private ?string $imgUrl;

    /**
     * @var string
     *
     * @Mapping\Column(name="city", type="string", nullable=false)
     */
    private string $city;

    /**
     * @var string
     *
     * @Mapping\Column(name="state", type="string", nullable=false)
     */
    private string $state;

    /**
     * @var string|null
     *
     * @Mapping\Column(name="phone", type="string", nullable=true)
     */
    private ?string $phone;

    /**
     * @var string
     *
     * @Mapping\Column(name="address", type="string", nullable=false)
     */
    private string $address;

    /**
     * @var string|null
     *
     * @Mapping\Column(name="website_url", type="string", length=2048, nullable=true)
     */
    private ?string $website;

    /**
     * @return int
     */
    public function getId(): int
    {
        return $this->id;
    }

    /**
     * @param int $id
     */
    public function setId(int $id): void
    {
        $this->id = $id;
    }

    /**
     * @return string
     */
    public function getName(): string
    {
        return $this->name;
    }

    /**
     * @param string $name
     */
    public function setName(string $name): void
    {
        $this->name = $name;
    }

    /**
     * @return string|null
     */
    public function getImgUrl(): ?string
    {
        return $this->imgUrl;
    }

    /**
     * @param string|null $imgUrl
     */
    public function setImgUrl(?string $imgUrl): void
    {
        $this->imgUrl = $imgUrl;
    }

    /**
     * @return string
     */
    public function getCity(): string
    {
        return $this->city;
    }

    /**
     * @param string $city
     */
    public function setCity(string $city): void
    {
        $this->city = $city;
    }

    /**
     * @return string
     */
    public function getState(): string
    {
        return $this->state;
    }

    /**
     * @param string $state
     */
    public function setState(string $state): void
    {
        $this->state = $state;
    }

    /**
     * @return string|null
     */
    public function getPhone(): ?string
    {
        return $this->phone;
    }

    /**
     * @param string|null $phone
     */
    public function setPhone(?string $phone): void
    {
        $this->phone = $phone;
    }

    /**
     * @return string
     */
    public function getAddress(): string
    {
        return $this->address;
    }

    /**
     * @param string $address
     */
    public function setAddress(string $address): void
    {
        $this->address = $address;
    }

    /**
     * @return string|null
     */
    public function getWebsite(): ?string
    {
        return $this->website;
    }

    /**
     * @param string|null $website
     */
    public function setWebsite(?string $website): void
    {
        $this->website = $website;
    }
}
<file_sep><?php
namespace App\Command;
use App\Parser\CollegesListPageCountParser;
use App\Parser\CollegesListParser;
use App\Parser\CollegesProfileParser;
use App\Service\CollegeService;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command that scrapes the Princeton Review college list,
 * enriches each entry with profile data (address, website, phone) and
 * synchronises the database through CollegeService: existing colleges
 * are updated, new ones created, and colleges that disappeared from
 * the list are deleted.
 */
class CollectDataCollegesCommand extends Command
{
    protected static $defaultName = 'app:collect-data-colleges';

    /** First page of the college search listing to scrape. */
    private string $url = 'https://www.princetonreview.com/college-search?ceid=cp-1022984';

    private CollegeService $collegeService;

    public function __construct(CollegeService $collegeService)
    {
        $this->collegeService = $collegeService;
        parent::__construct();
    }

    protected function configure()
    {
        // The command name is already supplied via $defaultName;
        // the redundant setName() call was removed.
        $this->setDescription('Collects data for all colleges from the list');
    }

    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $url = $this->url;
        $baseUrl = $this->getBaseUrl($url);

        // Discover how many listing pages exist and build one URL per page.
        $collegesListPageCountParser = new CollegesListPageCountParser();
        $pageCount = $collegesListPageCountParser->parse($url);
        $urls = [$url];
        for ($i = 2; $i <= $pageCount; $i++) {
            $urls[] = $url . '&page=' . $i;
        }

        $collegesData = $this->getCollegesListData($urls);
        $count = count($collegesData);

        // Visit each profile page and merge its details into the list row,
        // reporting "total|current" progress after each profile.
        $output->writeln('[profiles]');
        for ($i = 0; $i < $count; $i++) {
            $profile_url = $baseUrl . $collegesData[$i]['profile_relative_link'];
            $profileData = $this->getCollegeProfileData($profile_url);
            $collegesData[$i]['address'] = $profileData['address'];
            $collegesData[$i]['website_url'] = $profileData['website_url'];
            $collegesData[$i]['phone'] = $profileData['phone'];
            $output->writeln($count . '|' . ($i + 1));
        }

        $this->updateCollegesData($collegesData);
        return Command::SUCCESS;
    }

    /**
     * Upserts every scraped college and deletes the database rows that
     * were not seen in this run.
     *
     * @param array $collegesData rows produced by the list/profile parsers
     */
    private function updateCollegesData(array $collegesData = [])
    {
        $updated_ids = array();
        foreach ($collegesData as $collegeData) {
            // Colleges are matched by (name, city); no stable external id exists.
            $collegeFromDB = $this->collegeService->getCollegeByNameAndCity($collegeData['name'], $collegeData['city']);
            if ($collegeFromDB === null) {
                $updated_ids[] = $this->collegeService->createCollege(
                    $collegeData['name'],
                    $collegeData['address'],
                    $collegeData['city'],
                    $collegeData['state'],
                    $collegeData['img_url'],
                    $collegeData['phone'],
                    $collegeData['website_url']
                );
            } else {
                $updated_ids[] = $collegeFromDB->getId();
                $this->collegeService->updateCollege(
                    $collegeFromDB->getId(),
                    $collegeData['name'],
                    $collegeData['address'],
                    $collegeData['city'],
                    $collegeData['state'],
                    $collegeData['img_url'],
                    $collegeData['phone'],
                    $collegeData['website_url']
                );
            }
        }

        // Remove every college that was not part of this scrape.
        $collegesDB = $this->collegeService->getAll();
        foreach ($collegesDB as $collegeDB) {
            if (!in_array($collegeDB->getId(), $updated_ids)) {
                $this->collegeService->deleteCollege($collegeDB->getId());
            }
        }
    }

    /**
     * Scrapes the listing pages into an array of college rows.
     *
     * @param string[] $urls listing page URLs
     * @return array rows with name/city/state/img_url/profile_relative_link
     */
    private function getCollegesListData(array $urls = [])
    {
        $collegesListParser = new CollegesListParser();
        return $collegesListParser->parse($urls);
    }

    /**
     * Scrapes a single college profile page.
     * (Renamed from the typo `getCollegeProfileDate`; the method is
     * private and only called from execute().)
     *
     * @param string $url absolute profile URL
     * @return array name/address/website_url/phone
     */
    private function getCollegeProfileData(string $url)
    {
        $collegesListParser = new CollegesProfileParser();
        return $collegesListParser->parse($url);
    }

    /**
     * Extracts "scheme://host" from a URL so relative profile links can
     * be made absolute.
     */
    private function getBaseUrl($url): string
    {
        $parsedUrl = parse_url($url);
        $scheme = isset($parsedUrl['scheme']) ? $parsedUrl['scheme'] . '://' : '';
        $host = $parsedUrl['host'] ?? '';
        return "$scheme$host";
    }
}
<file_sep><?php
namespace App\Repository;
use Doctrine\ORM\EntityRepository;
/**
 * Doctrine repository for the College entity.
 * The base EntityRepository already provides find/findAll/findBy/findOneBy;
 * custom query methods would go here.
 */
class CollegeRepository extends EntityRepository
{
}
<file_sep><?php
namespace App\Parser;
use Symfony\Component\DomCrawler\Crawler;
/**
 * Fetches the first college-listing page and extracts the total number
 * of pages from the pagination text.
 */
class CollegesListPageCountParser
{
    /**
     * @param string $url listing page URL
     * @return int page count, or 0 when the page could not be fetched
     */
    public function parse($url)
    {
        $ch = curl_init();
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
        curl_setopt($ch, CURLOPT_URL, $url);
        $html = curl_exec($ch);
        // FIX: close the handle (it leaked) and do not feed a `false`
        // transfer result into the HTML parser.
        curl_close($ch);
        if ($html === false) {
            return 0;
        }
        return $this->extractFromHtml($html);
    }

    /**
     * Pulls the page count out of the centered pagination text; the count
     * is assumed to be the 4th whitespace-separated token (e.g.
     * "Page 1 of 32").
     */
    private function extractFromHtml($html)
    {
        $crawler = new Crawler($html);
        $pageText = $crawler->filterXPath('//div[contains(@class,"col-sm-9 desktop-74p-width")]')
            ->filterXPath('//div[contains(@style, "text-align: center;")]')
            ->filter('div')->first()->text();
        return (int) explode(' ', $pageText)[3];
    }
}
<file_sep><?php
namespace App\Parser;
use Symfony\Component\DomCrawler\Crawler;
/**
 * Scrapes one or more college-listing pages into rows of
 * name / city / state / img_url / profile_relative_link.
 */
class CollegesListParser
{
    /**
     * @param string[] $urls listing page URLs
     * @return array merged rows from every page
     */
    public function parse(array $urls = [])
    {
        $result = [];
        foreach ($urls as $url) {
            $ch = curl_init();
            curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
            curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
            curl_setopt($ch, CURLOPT_URL, $url);
            curl_setopt($ch, CURLOPT_TIMEOUT, 15);
            // FIX: the original `while (!$html) curl_exec(...)` retried
            // forever on a dead host; retry a bounded number of times and
            // release the handle afterwards.
            $attempts = 0;
            do {
                $html = curl_exec($ch);
            } while ($html === false && ++$attempts < 3);
            curl_close($ch);
            if ($html === false) {
                continue; // skip pages that never loaded
            }
            $result = array_merge($result, $this->extractFromHtml($html));
        }
        return $result;
    }

    /**
     * Extracts one row per "row vertical-padding" block; city/state come
     * from the "City, ST" location text and the image is optional.
     */
    private function extractFromHtml($html)
    {
        $crawler = new Crawler($html);
        return $crawler->filterXPath('//div[contains(@class, "row vertical-padding")]')
            ->each(
                function (Crawler $node, $i) {
                    $name = $node->filter('h2')->first()->text();
                    $profileRelativeLink = $node->filter('h2')->filter('a')->first()->attr('href');
                    $locationContainer = $node->filterXPath('//div[contains(@class, "location")]');
                    $city = '';
                    $state = '';
                    if ($locationContainer->count() != 0) {
                        $location = $locationContainer->first()->text();
                        [$city, $state] = explode(', ', $location);
                    }
                    $imgNodes = $node->filter('img');
                    $imgUrl = null;
                    if ($imgNodes->count() > 0) {
                        $imgUrl = $imgNodes->first()->attr('src');
                    }
                    return [
                        'name' => $name,
                        'city' => $city,
                        'state' => $state,
                        'img_url' => $imgUrl,
                        'profile_relative_link' => $profileRelativeLink
                    ];
                }
            );
    }
}
<file_sep><?php
namespace App\Service;
use App\Entity\College;
use App\Repository\CollegeRepository;
use Doctrine\ORM\EntityManagerInterface;
/**
 * CRUD service for the College entity, wrapping the Doctrine
 * EntityManager and repository.
 */
class CollegeService
{
    /** @var EntityManagerInterface */
    private EntityManagerInterface $entityManager;

    private CollegeRepository $collegeRepository;

    /**
     * CollegeService constructor.
     * @param EntityManagerInterface $entityManager
     */
    public function __construct(EntityManagerInterface $entityManager)
    {
        $this->entityManager = $entityManager;
        $this->collegeRepository = $entityManager->getRepository(College::class);
    }

    /**
     * @return College[]
     */
    public function getAll(): array
    {
        return $this->collegeRepository->findAll();
    }

    /**
     * Creates and persists a new College.
     *
     * @param string $name
     * @param string $address
     * @param string $city
     * @param string $state
     * @param string|null $imgUrl
     * @param string|null $phone
     * @param string|null $website
     * @return int|null id of the newly created record
     */
    public function createCollege(
        string $name,
        string $address,
        string $city,
        string $state,
        ?string $imgUrl = null,
        ?string $phone = null,
        ?string $website = null
    ): ?int {
        $college = new College();
        $this->applyData($college, $name, $address, $city, $state, $imgUrl, $phone, $website);
        $this->entityManager->persist($college);
        $this->entityManager->flush();
        return $college->getId();
    }

    /**
     * Looks a college up by its (name, city) pair.
     *
     * @param string $name
     * @param string $city
     * @return College|null
     */
    public function getCollegeByNameAndCity(string $name, string $city): ?College
    {
        return $this->collegeRepository->findOneBy(['name' => $name, 'city' => $city]);
    }

    /**
     * Updates an existing College.
     * FIX: the original called setters on the result of find() without a
     * null check and crashed on an unknown id; now returns null instead.
     *
     * @param int $id
     * @param string $name
     * @param string $address
     * @param string $city
     * @param string $state
     * @param string|null $imgUrl
     * @param string|null $phone
     * @param string|null $website
     * @return int|null updated id, or null when the id does not exist
     */
    public function updateCollege(
        int $id,
        string $name,
        string $address,
        string $city,
        string $state,
        ?string $imgUrl = null,
        ?string $phone = null,
        ?string $website = null
    ): ?int {
        $college = $this->collegeRepository->find($id);
        if ($college === null) {
            return null;
        }
        $this->applyData($college, $name, $address, $city, $state, $imgUrl, $phone, $website);
        $this->entityManager->flush();
        return $college->getId();
    }

    /**
     * Deletes a college by id; returns false when the id is unknown.
     */
    public function deleteCollege(int $id): bool
    {
        $college = $this->collegeRepository->find($id);
        if ($college === null) {
            return false;
        }
        $this->entityManager->remove($college);
        $this->entityManager->flush();
        return true;
    }

    /**
     * Copies the scalar fields onto the entity. Optional fields are only
     * set when a truthy value is supplied — this mirrors the previous
     * behaviour of createCollege/updateCollege, which shared this code.
     */
    private function applyData(
        College $college,
        string $name,
        string $address,
        string $city,
        string $state,
        ?string $imgUrl,
        ?string $phone,
        ?string $website
    ): void {
        $college->setName($name);
        $college->setAddress($address);
        $college->setCity($city);
        $college->setState($state);
        if ($imgUrl) {
            $college->setImgUrl($imgUrl);
        }
        if ($phone) {
            $college->setPhone($phone);
        }
        if ($website) {
            $college->setWebsite($website);
        }
    }
}
<file_sep><?php
declare(strict_types=1);
namespace App\Entity\Traits;
use DateTime;
use Doctrine\ORM\Mapping;
/**
 * Adds an `updated_at` column that is refreshed automatically by
 * Doctrine lifecycle callbacks on both insert and update.
 *
 * @Mapping\HasLifecycleCallbacks
 */
trait UpdatedAtTrait
{
    /**
     * @var DateTime
     *
     * @Mapping\Column(name="updated_at", type="datetime", nullable=false)
     */
    protected DateTime $updatedAt;

    public function getUpdatedAt(): DateTime {
        return $this->updatedAt;
    }

    /**
     * Stamps the entity with the current time; Doctrine invokes this
     * before every persist and update.
     *
     * @Mapping\PreUpdate
     * @Mapping\PrePersist
     */
    public function setUpdatedAt(): void {
        $this->updatedAt = new DateTime();
    }
}<file_sep><?php
namespace App\Parser;
use Exception;
use Symfony\Component\DomCrawler\Crawler;
/**
 * Scrapes one college profile page into name / address / website_url /
 * phone.
 */
class CollegesProfileParser
{
    /**
     * @param string $url absolute profile URL
     * @return array name/address/website_url/phone (phone may be null)
     */
    public function parse(string $url)
    {
        $ch = curl_init();
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
        curl_setopt($ch, CURLOPT_URL, $url);
        curl_setopt($ch, CURLOPT_TIMEOUT, 15);
        // FIX: bounded retry instead of an infinite `while (!$html)` loop,
        // and close the handle (it leaked).
        $attempts = 0;
        do {
            $html = curl_exec($ch);
        } while ($html === false && ++$attempts < 3);
        curl_close($ch);
        if ($html === false) {
            $html = '';
        }
        return $this->extractFromHtml($html);
    }

    private function extractFromHtml($html)
    {
        $crawler = new Crawler($html);
        $name = $crawler->filter('h1 > span')->first()->text();

        // FIX: was `$addressContainer = $websiteUrl = $crawler->...`, an
        // accidental chained assignment whose $websiteUrl value was thrown
        // away on the next line.
        $addressContainer = $crawler->filterXPath('//div[contains(@itemprop, "address")]');
        $websiteUrl = '';
        $address = '';
        if ($addressContainer->count() != 0) {
            $websiteUrlContainer = $addressContainer->filter('a');
            if ($websiteUrlContainer->count() != 0) {
                $websiteUrl = $websiteUrlContainer->first()->attr('href');
            }
            try {
                $street = trim($crawler->filterXPath('//span[contains(@itemprop, "streetAddress")]')->first()->text());
                $street = preg_replace('/\s+/', ' ', $street);
                $city = trim($crawler->filterXPath('//span[contains(@itemprop, "addressLocality")]')->first()->text());
                $city = preg_replace('/\s+/', ' ', $city);
                $state = trim($crawler->filterXPath('//span[contains(@itemprop, "addressRegion")]')->first()->text());
                $state = preg_replace('/\s+/', ' ', $state);
                $postalCode = trim($crawler->filterXPath('//span[contains(@itemprop, "postalCode")]')->first()->text());
                $address = "$street | $city, $state | $postalCode";
            } catch (Exception $e) {
                // Best effort: pages without full microdata keep an empty address.
            }
        }

        // Walk the contact rows and take the value next to a "Phone" label.
        $phone = null;
        $collegeContacts = $crawler
            ->filterXPath('//div[contains(@class, "school-contacts")]')
            ->filterXPath('//div[contains(@class, "col-sm-9")]')
            ->filterXPath('//div[contains(@class, "row")]');
        for ($i = 0; $i < $collegeContacts->count(); $i++) {
            $title = $collegeContacts->eq($i)
                ->filterXPath('//div[contains(@class, "col-xs-6 bold")]')
                ->first()
                ->text();
            if (strcasecmp($title, 'phone') == 0) {
                $phone = $collegeContacts->eq($i)
                    ->filterXPath('//div[contains(@class, "col-xs-6")]')
                    ->last()
                    ->text();
            }
        }

        return [
            "name" => $name,
            "address" => $address,
            "website_url" => $websiteUrl,
            "phone" => $phone,
        ];
    }
}
|
7030b779407ec36420a8e35ae724fd6cbb755ac3
|
[
"PHP"
] | 8 |
PHP
|
ondrosru/TestTaskColleges
|
6b3b08cb5ccf0df929108df87ed161bb67d78901
|
5d1c9465b08f5aaeec994c3fc7d7240577c536d7
|
refs/heads/master
|
<file_sep>//google copy
const search = document.querySelector(".searchbar");
const google = document.querySelector(".Icon");
const googleSearch = document.querySelector(".googleSearch");
const feelingLucky = document.querySelector(".feelingLucky");
const lightsht = document.querySelector('.lightshot');
const gmail = document.querySelector('.gmail');
const effi = document.querySelector('.effi');
const clock = document.querySelector('.clock');
// History of typed values; the last element is the current query.
// BUG FIX: this was declared `const`, but goToGoogle() re-assigns it
// (`X = []`), which throws a TypeError on a const binding.
let X = [];
// Reload the page (bound to the logo icon).
function reload(){
    location.reload();
};
// Record the current input value on every "input" event.
function makeUrl(a){
    const inputValue = a.target.value;
    X.push(inputValue);
};
// Navigate to a Google search for the latest typed value.
function goToGoogle(){
    const inputValue = X[X.length -1];
    const url = `https://www.google.com/search?q=${inputValue}`;
    location.href = url;
    X = [];
};
// Guard: refuse to search when nothing has been typed.
function noKeyword(){
    if(X.length === 0 || X[X.length -1] === ""){
        alert('검색어를 입력하세요.');
    } else{
        goToGoogle();
    }
};
// Enter key (keyCode 13) triggers the search.
function pressEnter(a){
    if(a.which === 13){
        noKeyword();
    }
};
// Render the current time as zero-padded HH:MM:SS.
function timeClock(){
    const currentTime = new Date();
    const hour = currentTime.getHours();
    const min = currentTime.getMinutes();
    const sec = currentTime.getSeconds();
    const tTime = `${hour < 10 ? `0${hour}` : hour}:${min < 10 ? `0${min}` : min}:${sec < 10 ? `0${sec}` : sec}`;
    clock.innerHTML = tTime;
};
// Shortcut openers (workEnv is currently not bound to any element).
function workEnv(){
    window.open("https://www.youtube.com");
};
function lightShot(){
    window.open("https://prnt.sc");
};
function gMail(){
    window.open("https://mail.google.com/mail/u/1/#inbox");
};
function Effi(){
    window.open("https://effi.app");
};
// Start the clock immediately and refresh it every second.
function init(){
    timeClock();
    setInterval(timeClock, 1000);
}
search.addEventListener("input", makeUrl);
google.addEventListener("click", reload);
googleSearch.addEventListener("click", noKeyword);
lightsht.addEventListener("click", lightShot);
gmail.addEventListener("click",gMail);
effi.addEventListener('click',Effi);
search.addEventListener("keydown", pressEnter);
init();
|
59d1c8716a627e573dc41d2d65f2d5a14dbd7a47
|
[
"JavaScript"
] | 1 |
JavaScript
|
SamKimforCode/googleClone
|
de0be24fd0b87a9c7d9f182dbc878c2e750b6c86
|
1db9454ecb21138b4f037efb3fc1e377c308639d
|
refs/heads/master
|
<repo_name>nyarla/http-simulator<file_sep>/index.js
'use strict';
var http = require('http'),
net = require('net'),
qs = require('querystring'),
Buffer = require('buffer').Buffer,
URI = require('uri-template-lite').URI
;
exports.parseHTTPVersion = function parseHTTPVersion(data) {
var major, minor;
if ( typeof(data) === 'string' ) {
var src = data.split('.');
if ( src.length !== 2 ) {
throw new Error('invalid http version string: data => ' + data);
}
major = parseInt(src[0]);
if ( typeof(major) !== 'number' || major !== major ) {
throw new Error('invalid http version string: failed to parse major number: data => ' + data);
}
minor = parseInt(src[1]);
if ( typeof(minor) !== 'number' || minor !== minor) {
throw new Error('invalid http version string: failed to parse minor number: data => ' + data);
}
}
else if ( data instanceof Array ) {
if ( data.length !== 2 ) {
throw new Error('invalid http version array: the format of this array should be [number, number]');
}
major = data[0];
minor = data[1];
}
else if ( data instanceof Object ) {
if ( ! 'major' in data ) {
throw new Error('invalid http version Object: this Object should be included `major` property as number.');
}
if ( ! 'minor' in data ) {
throw new Error('invalid http version Object: this Object should be included `minor` property as number.');
}
major = data.major;
minor = data.minor;
}
else {
major = 1;
minor = 0;
}
if ( typeof(major) !== 'number' ) {
throw new Error('invalid http verison number: major version of http is not number.');
}
if ( typeof(minor) !== 'number' ) {
throw new Error('invalid http version number: minor version of http is not number.');
}
if ( major <= 0 ) {
throw new Error('invalid http version number: major version should be larger than 0');
}
if ( minor < 0 ) {
throw new Error('invalid http version number: minor version should not be smaller than 0');
}
return [ major, minor ];
}
exports.buildHTTPURL = function buildHTTPURL(base, params, queries) {
var url = URI.expand(base, params);
var query = qs.stringify(queries);
if ( query !== '' ) {
url += '?' + query;
}
return url;
}
// Builds a simulated http.IncomingMessage from a plain description object `v`.
// Recognised keys: httpVersion, method, path, pathParams, queryParams, headers.
// Header folding mirrors node's rules: singleton headers keep only their first
// occurrence, `set-cookie` accumulates an array, everything else is
// comma-joined.
exports.createHTTPIncomingMessage = function createHTTPIncomingMessage(v) {
  // Version defaults to HTTP/1.0; see parseHTTPVersion for accepted formats.
  var httpVersion = [ 1, 0 ];
  if ( 'httpVersion' in v ) {
    httpVersion = exports.parseHTTPVersion(v.httpVersion);
  }
  // Method is normalised to upper case; default GET.
  var method = 'GET';
  if ( 'method' in v ) {
    method = v.method.toUpperCase();
  }
  var path = 'http://localhost/';
  if ( 'path' in v ) {
    path = v.path;
  }
  var pathParams = {};
  if ( 'pathParams' in v ) {
    pathParams = v.pathParams;
  }
  var queryParams = {};
  if ( 'queryParams' in v ) {
    queryParams = v.queryParams;
  }
  // Headers arrive as an array of [name, value] pairs.
  var headers = [];
  if ( 'headers' in v) {
    headers = v.headers;
  }
  var req = new http.IncomingMessage();
  req.httpVersionMajor = httpVersion[0];
  req.httpVersionMinor = httpVersion[1];
  req.httpVersion = httpVersion[0].toString(10) + '.' + httpVersion[1].toString(10);
  req.method = method;
  req.url = exports.buildHTTPURL(path, pathParams, queryParams);
  for ( var idx = 0, len = headers.length; idx < len ; idx++ ) {
    var data = headers[idx];
    var raw = data[0];            // header name as supplied (original casing)
    var key = raw.toLowerCase();  // lookup key in req.headers
    var val = data[1];
    switch ( key ) {
      // Headers that may appear only once: keep the first occurrence.
      case 'age':
      case 'authorization':
      case 'content-length':
      case 'content-type':
      case 'etag':
      case 'expires':
      case 'from':
      case 'host':
      case 'if-modified-since':
      case 'if-unmodified-since':
      case 'last-modified':
      case 'location':
      case 'max-forwards':
      case 'proxy-authorization':
      case 'referer':
      case 'retry-after':
      case 'user-agent':
        if ( typeof(req.headers[key]) === 'undefined' ) {
          req.headers[key] = val
          req.rawHeaders.push(raw);
          req.rawHeaders.push(val);
        }
        break;
      // set-cookie collects every value into an array.
      case 'set-cookie':
        if ( typeof(req.headers[key]) === 'undefined' ) {
          req.headers[key] = [ val ];
        }
        else {
          req.headers[key].push(val);
        }
        req.rawHeaders.push(raw);
        req.rawHeaders.push(val);
        break;
      // All other headers are comma-joined on repetition.
      default:
        if ( typeof(req.headers[key]) === 'undefined' ) {
          req.headers[key] = val;
        }
        else {
          req.headers[key] = [ req.headers[key], val ].join(', ');
        }
        req.rawHeaders.push(raw);
        req.rawHeaders.push(val);
        break;
    }
  }
  // Attach a dummy socket so code that touches req.socket/connection works.
  req.connection = req.socket = new net.Socket();
  return req;
}
// Builds a http.ServerResponse whose end() never touches a socket: instead
// it captures status, headers and body into a plain object and hands it to
// `done(null, result)`. The override mirrors the structure of node's own
// OutgoingMessage#end and relies on node-internal fields (_header,
// _headers, _headerNames, _implicitHeader, _hasBody) — version-sensitive.
exports.createHTTPServerResponse = function createHTTPServerResponse(req, done) {
  var res = new http.ServerResponse(req);
  res.end = function (data, encoding, callback) {
    // Re-shuffle optional arguments, matching node's end(data?, enc?, cb?).
    if ( typeof(data) === 'function' ) {
      callback = data;
      encoding = null;
    }
    else if ( typeof(encoding) === 'function' ) {
      callback = encoding;
      encoding = null;
    }
    if ( data && typeof(data) !== 'string' && ! (data instanceof Buffer) ) {
      throw new TypeError('First argument must be a string or Buffer');
    }
    // A finished response ignores further end() calls (as node does).
    if ( this.finished ) {
      return false;
    }
    // Force header generation if the app never flushed them explicitly.
    if ( ! this._header ) {
      if (data) {
        this._contentLength = ( typeof(data) === 'string' )
          ? Buffer.byteLength(data)
          : data.length ;
      }
      else {
        this._contentLength = 0;
      }
      this._implicitHeader();
    }
    // e.g. HEAD responses carry no body.
    if ( data && ! this._hasBody ) {
      data = null;
    }
    var ret = {
      statusCode: this.statusCode,
      statusMessage: this.statusMessage,
      headers: [],
      body: ''
    };
    // _headerNames preserves the casing the app used in setHeader().
    for ( var key in this._headers ) {
      if ( this._headers.hasOwnProperty(key) ) {
        ret.headers.push([ this._headerNames[key], this._headers[key] ]);
      }
    }
    if (data) {
      ret.body = ( typeof(encoding) === 'string' )
        ? data.toString(encoding)
        : data.toString() ;
    }
    done(null, ret);
    return true;
  };
  return res;
};
exports.createSimulator = function createSimulator(app) {
return function (v, done) {
try {
var r = exports.createHTTPIncomingMessage(v);
var w = exports.createHTTPServerResponse(r, done);
app(r, w);
if ( typeof(v['body']) !== 'undefined' && r.method !== 'HEAD' && r.method !== 'GET' ) {
r.emit('data', v.body);
}
r.emit('end');
}
catch (err) {
done(err, null);
}
}
};
<file_sep>/README.md
@nyarla/http-simulator
======================
* Utility functions for simulating http access without an http server in node.js
[](https://travis-ci.org/nyarla/http-simulator)
SYNOPSIS
--------
```js
// load @nyarla/http-simulator
const L = require('@nyarla/http-simulator');
// this function is called from simulator function when end of http access simulation
const done = function (err, data) {
console.log(err); // error message (Error or null)
console.log(data.statusCode); // http status code (number)
console.log(data.statusMessage); // http status message (String)
console.log(data.headers); // Array of header-value pairs (Array< Array<K, V> >)
console.log(data.body); // response message (String or Buffer)
};
// web application handler on node.js
// req => require('http').IncomingMessage
// res => require('http').ServerResponse
const app = function (req, res) {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end('hello, world!');
};
// create simulator function
const fn = L.createSimulator(app);
// calling web application handler by http access simulator
fn(
{
// simulated http version
// you can specify this value as '1.0' or { major: 1, minor: 0 }
// default value is [ 1, 0 ]
httpVersion: [ 1, 0 ],
// simulated http method.
// this value is case-insensitive
// default value is 'GET'
method: 'put',
// request path.
// you can use a URI template with pathParams in this value,
// and URI template is processing by `uri-template-lite`
// default value is 'http://localhost/'
path: 'http://localhost/accounts/{id}',
pathParams: {
id: 'nyarla'
},
// query parameters
// this value is processed by node.js's `querystring` module
// default value is {}
queryParams: {
action: 'put'
},
// http headers.
// default value is []
headers: [
[ 'Content-Type', 'text/plain' ],
[ 'X-Powered-By', 'http-simulator' ]
]
},
done
);
```
DEPENDENCIES
-----------
* `http`, `net`, `buffer` and `querystring` in node.js modules
* [uri-template-lite](https://npmjs.com/packages/uri-template-lite)
AUTHOR
------
<NAME> (Nyarla) <<EMAIL>> <https://nyarla.net/> (this website is written by Japanese)
NOTES
-----
This module overrides the instance method `http.ServerResponse#end`,
and the replacement implementation of `http.ServerResponse#end` is
based on the JavaScript code of node.js's `http` core module.
LICENSE
-------
MIT
<file_sep>/test.js
'use strict';
// Mocha test suite for @nyarla/http-simulator. The literal values asserted
// below are the public contract of index.js — do not change them casually.
var http = require('http');
var net = require('net');
var assert = require('power-assert');
var L = require('./index');
describe('@nyarla/http-simulator', function () {
  // Version strings/arrays/objects must normalise to [major, minor].
  context('#parseHTTPVersion', function () {
    it('should be succeed of pasing these format string', function () {
      assert.deepEqual( L.parseHTTPVersion('1.0'), [ 1, 0 ] );
      assert.deepEqual( L.parseHTTPVersion('1.1'), [ 1, 1 ] );
      assert.deepEqual( L.parseHTTPVersion('2.0'), [ 2, 0 ] );
    });
    it('should be failed of parsing these format string', function () {
      assert.throws(function () { L.parseHTTPVersion('1.1.1') });
      assert.throws(function () { L.parseHTTPVersion('A.1') });
      assert.throws(function () { L.parseHTTPVersion('1.B') });
    });
    it('should be succeed of parsing these format array', function () {
      assert.deepEqual( L.parseHTTPVersion([1,0]), [1,0] );
      assert.deepEqual( L.parseHTTPVersion([1,1]), [1,1] );
      assert.deepEqual( L.parseHTTPVersion([2,0]), [2,0] );
    });
    it('should be failed of parsing these format array', function () {
      assert.throws(function () { L.parseHTTPVersion([1,2,3]) });
      assert.throws(function () { L.parseHTTPVersion(['A', 1]) });
      assert.throws(function () { L.parseHTTPVersion([ 1, 'B' ]) });
    });
    it('should be succeed of parsing these format object', function () {
      assert.deepEqual( L.parseHTTPVersion({ major: 1, minor: 0 }), [ 1, 0 ] );
      assert.deepEqual( L.parseHTTPVersion({ major: 1, minor: 1 }), [ 1, 1 ] );
      assert.deepEqual( L.parseHTTPVersion({ major: 2, minor: 0 }), [ 2, 0 ] );
    });
    it('should be failed of parsing these format object', function () {
      assert.throws(function () { L.parseHTTPVersion({ major: 1 }) });
      assert.throws(function () { L.parseHTTPVersion({ minor: 1 }) });
      assert.throws(function () { L.parseHTTPVersion({ }) });
    });
    it('should uses default values', function () {
      assert.deepEqual(L.parseHTTPVersion(), [ 1, 0 ]);
    });
    // Zero/negative components are rejected in every input format.
    it('should be failed these format numbers', function () {
      assert.throws(function () { L.parseHTTPVersion('0.1') });
      assert.throws(function () { L.parseHTTPVersion([ 0, 1 ]) });
      assert.throws(function () { L.parseHTTPVersion({ major: 0, minor: 1 }) });
      assert.throws(function () { L.parseHTTPVersion('-1.1') });
      assert.throws(function () { L.parseHTTPVersion([ -1, 1 ]) });
      assert.throws(function () { L.parseHTTPVersion({ major: -1, minor: 1 }) });
      assert.throws(function () { L.parseHTTPVersion('1.-1') });
      assert.throws(function () { L.parseHTTPVersion([ 1, -1 ]) });
      assert.throws(function () { L.parseHTTPVersion({ major: 1, minor: -1 }) });
    });
  });
  // URI template expansion + query string serialisation.
  context('#buildHTTPURL', function () {
    it('should generates http URL', function () {
      assert.equal(
        L.buildHTTPURL('http://localhost/{id}', { id: 'nyarla' }, { action: 'search' }),
        'http://localhost/nyarla?action=search'
      );
    });
  });
  // Header folding rules: comma-join by default, first-wins for singleton
  // headers (User-Agent), array accumulation for Set-Cookie.
  context('#createHTTPIncomingMessage', function () {
    it('should be creatable http.IncomingMessage', function () {
      var r = L.createHTTPIncomingMessage({
        httpVersion: [ 1, 1 ],
        method: 'put',
        path: 'http://localhost/{id}',
        pathParams: { id: 'nyarla' },
        queryParams: { action: 'update' },
        headers: [
          [ 'X-PoweredBy', 'foo' ],
          [ 'X-PoweredBy', 'bar' ],
          [ 'User-Agent', 'foo' ],
          [ 'User-Agent', 'bar' ],
          [ 'Set-Cookie', 'foo' ],
          [ 'Set-Cookie', 'bar' ]
        ]
      });
      assert.ok( r instanceof http.IncomingMessage );
      assert.ok( r.connection instanceof net.Socket );
      assert.ok( r.socket instanceof net.Socket );
      assert.equal( r.httpVersionMajor, 1 );
      assert.equal( r.httpVersionMinor, 1 );
      assert.equal( r.httpVersion, "1.1" );
      assert.equal( r.method, 'PUT' );
      assert.equal( r.url, 'http://localhost/nyarla?action=update' );
      assert.deepEqual(
        r.headers,
        {
          'x-poweredby': 'foo, bar',
          'user-agent': 'foo',
          'set-cookie': [ 'foo', 'bar' ]
        }
      );
    });
  });
  // end() must capture status/headers/body into the done() callback.
  context('#createHTTPServerResponse', function () {
    it('should be creatable http.ServerResponse', function (done) {
      var app = function (r, w) {
        w.statusCode = 200;
        w.setHeader('Content-Type', 'text/plain');
        w.end('hello, world!');
      };
      var fn = function (err, ret) {
        assert.deepEqual(ret, {
          statusCode: 200,
          statusMessage: "OK",
          headers: [
            [ 'Content-Type', 'text/plain' ]
          ],
          body: 'hello, world!'
        });
        done(err);
      };
      var r = L.createHTTPIncomingMessage({
        method: 'get',
        path: 'https://localhost/nyarla'
      });
      var w = L.createHTTPServerResponse(r, fn);
      app(r, w);
    });
  });
  // End-to-end: simulator wires request + response + handler together.
  context('#createSimulator', function () {
    it('should returns the simulated function of http access without http server', function (done) {
      var app = function (r, w) {
        w.statusCode = 200;
        w.setHeader('Content-Type', 'text/plain');
        w.end('hello, world!');
      };
      var req = {
        method: 'GET',
        url: 'http://localhost/nyarla',
      };
      var fn = L.createSimulator(app);
      fn(req, function (err, ret) {
        assert.deepEqual(ret, {
          statusCode: 200,
          statusMessage: 'OK',
          headers: [
            [ 'Content-Type', 'text/plain' ]
          ],
          body: 'hello, world!'
        });
        done(err);
      })
    });
  });
});
|
626de1ba232763936585117b9c2d921755e3240b
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
nyarla/http-simulator
|
5f7e20c942c16df44b33efe19a519fbee1f6adbb
|
9dd3cf76955fa92b4daef27f70c95b72cb9d7f1c
|
refs/heads/master
|
<file_sep>#include"pch.h"
#include "MazeTraversal.h"
#include<iostream>
#include<string>
#include<fstream>
using namespace std;
// Allocates a size x size grid, loads it from maze.txt and locates the
// entry ('.' in the leftmost column) and exit ('.' in the rightmost
// column). end_row stays -1 when the maze has no exit.
MazeTraversal::MazeTraversal(int size) : size(size) {
    maze = new char* [size];
    for (int i = 0; i < size; i++) {
        maze[i] = new char[size];
    }
    for (int i = 0; i < size; i++) {
        for (int j = 0; j < size; j++) {
            maze[i][j] = '\0'; // FIX: was NULL (a pointer constant) for a char
        }
    }
    LoadMaze();

    // Entry: last open cell in column 0 (same pick as the original loop).
    // FIX: curr_row/start_row were left uninitialized when column 0 had
    // no '.'; default them to 0 first.
    curr_col = 0;
    start_col = 0;
    curr_row = 0;
    start_row = 0;
    for (int i = 0; i < size; i++) {
        if (maze[i][curr_col] == '.') {
            curr_row = i;
            start_row = i;
        }
    }

    // Exit: first open cell in the last column, or -1 when none exists.
    end_col = size - 1;
    end_row = -1;
    for (int i = 0; i < size; i++) {
        if (maze[i][end_col] == '.') {
            end_row = i;
            break;
        }
    }

    Dir = 'R';          // walker starts facing right
    cellHeightPx = 50;  // cell size used by the renderer
    cellWidthPx = 50;
}
void MazeTraversal::LoadMaze(){ // call in constructor
ifstream input;
char c;
int i = 0,j = 0;
input.open("maze.txt");
if (input.fail()) {
cout << "UNABLE TO OPEN FILE" << endl;
}
if (input.is_open()) {
while (input >> c) {
maze[i][j] = c;
j++;
if (j % 12 == 0) {
i++;
j = 0;
}
}
}
input.close();
}
// Moves the walker one cell up when that cell is open; beeps and records
// the facing direction. FIX: guard the grid edge — the original read
// maze[-1][...] when curr_row was 0.
void MazeTraversal::moveUp() {
    if (curr_row > 0 && maze[curr_row - 1][curr_col] == '.') {
        Beep(100, 50);
        Dir = 'U';
        curr_row = curr_row - 1;
    }
}
// Moves the walker one cell left when that cell is open. FIX: guard the
// grid edge — the original read maze[...][-1] when curr_col was 0.
void MazeTraversal::moveLeft() {
    if (curr_col > 0 && maze[curr_row][curr_col - 1] == '.') {
        Beep(100, 50);
        Dir = 'L';
        curr_col = curr_col - 1;
    }
}
void MazeTraversal::moveDown() {
if (maze[curr_row + 1][curr_col] == '.') {
Beep(100, 50);
Dir = 'D';
curr_row = curr_row + 1;
}
//noend
/*if (curr_col == end_col - 1 && end_row == -1 && curr_row == this->size - 2) {
end_row = start_row;
end_col = start_col;
}*/
}
void MazeTraversal::moveRight() {
if (maze[curr_row][curr_col + 1] == '.') {
Beep(100, 50);
Dir = 'R';
curr_col = curr_col + 1;
}
//noend
/*if (curr_col == end_col - 1 && end_row == -1 && curr_row == this->size - 2) {
end_row = start_row;
end_col = start_col;
}*/
}
bool MazeTraversal::DestinationReached(){
if (this->curr_col == this->end_col && this->curr_row == this->end_row) {
return true;
}
return false;
}
void MazeTraversal::display() {
for (int i = 0; i < this->size; i++) {
for (int j = 0; j < this->size; j++) {
cout << this->maze[i][j];
}
cout << endl;
}
}
// Advances the ghost one cell automatically. Each branch is a case
// analysis of which neighbouring cells are walls ('#') versus open
// ('.'), combined with the current travel direction Dir ('U'/'D'/'L'/'R');
// the chosen move*() helper updates the position (and Dir) itself.
// NOTE(review): branch order matters — more specific wall patterns must
// stay before less specific ones.
void MazeTraversal::moveToNextCell() { // it will be needed for automation
    // Former inline dead-end/restart handling, now performed in noend():
    /*if (curr_col == end_col - 1 && end_row == -1 && curr_row == this->size-2) {
    end_row = start_row;
    end_col = start_col;
    }*/
    // At the start cell: always step right, into the maze.
    if (curr_row == start_row && curr_col == start_col) {
        moveRight();
    }
    // Case 1: walls left and below; open above and right.
    else if (maze[curr_row][curr_col - 1] == '#' && maze[curr_row + 1][curr_col] == '#' && maze[curr_row - 1][curr_col] == '.' && maze[curr_row][curr_col + 1] == '.') {
        if (Dir == 'L') {
            moveUp();
        }
        else if (Dir == 'D') {
            moveRight();
        }
    }
    // Case 2: walls above and below (horizontal corridor) — keep going.
    else if (maze[curr_row - 1][curr_col] == '#' && maze[curr_row + 1][curr_col] == '#' && maze[curr_row][curr_col + 1] == '.' && maze[curr_row][curr_col - 1] == '.') {
        if (Dir == 'R') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveLeft();
        }
        else if (Dir == 'D') {
            moveDown();
        }
        else if (Dir == 'U') {
            moveUp();
        }
    }
    // Case 3: walls right and below; open left and above.
    else if (maze[curr_row][curr_col+1] == '#' && maze[curr_row+1][curr_col] == '#' && maze[curr_row][curr_col-1] == '.' && maze[curr_row-1][curr_col] == '.') {
        if (Dir == 'R') {
            moveUp();
        }
        else if (Dir == 'D') {
            moveLeft();
        }
    }
    // Case 4: walls left and above; open right and below.
    else if (maze[curr_row][curr_col-1] == '#' && maze[curr_row-1][curr_col] == '#' && maze[curr_row][curr_col+1] == '.' && maze[curr_row+1][curr_col] == '.') {
        if (Dir == 'U') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveDown();
        }
    }
    // Case 5: walls right and above; open left and below.
    else if (maze[curr_row][curr_col+1] == '#' && maze[curr_row-1][curr_col] == '#' && maze[curr_row][curr_col-1] == '.' && maze[curr_row+1][curr_col] == '.') {
        if (Dir == 'R') {
            moveDown();
        }
        else if (Dir == 'U') {
            moveLeft();
        }
    }
    // Case 6: wall below only.
    else if (maze[curr_row+1][curr_col] == '#' && maze[curr_row][curr_col-1] == '.' && maze[curr_row][curr_col+1] == '.' && maze[curr_row-1][curr_col] == '.') {
        if (Dir == 'D') {
            moveLeft();
        }
        else if (Dir == 'R') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveUp();
        }
    }
    // Case 7: walls above, below and left — passage opens to the right.
    else if (maze[curr_row-1][curr_col] == '#' && maze[curr_row+1][curr_col] == '#' && maze[curr_row][curr_col-1] == '#' && maze[curr_row][curr_col+1] == '.') {
        if (Dir == 'L') {
            moveRight();
        }
    }
    // Case 8: open on all four sides (crossing).
    else if (maze[curr_row+1][curr_col] == '.' && maze[curr_row-1][curr_col] == '.' && maze[curr_row][curr_col+1] == '.' && maze[curr_row][curr_col-1] == '.') {
        if (Dir == 'R') {
            moveDown();
        }
        else if (Dir == 'U') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveUp();
        }
        else if (Dir == 'D') {
            moveLeft();
        }
    }
    // Case 9: wall to the left only.
    else if (maze[curr_row][curr_col-1] == '#' && maze[curr_row-1][curr_col] == '.' && maze[curr_row+1][curr_col] == '.' && maze[curr_row][curr_col+1] == '.') {
        if (Dir == 'D') {
            moveDown();
        }
        else if (Dir == 'U') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveUp();
        }
    }
    // Case 10: wall above only.
    else if (maze[curr_row-1][curr_col] == '#' && maze[curr_row+1][curr_col] == '.' && maze[curr_row][curr_col-1] == '.' && maze[curr_row][curr_col+1] == '.') {
        if (Dir == 'R') {
            moveDown();
        }
        else if (Dir == 'L') {
            moveLeft();
        }
        else if (Dir == 'U') {
            moveRight();
        }
    }
    // Case 11: wall to the right only.
    else if (maze[curr_row][curr_col+1] == '#' && maze[curr_row][curr_col-1] == '.' && maze[curr_row+1][curr_col] == '.' && maze[curr_row-1][curr_col] == '.') {
        if (Dir == 'D') {
            moveLeft();
        }
        else if (Dir == 'U') {
            moveUp();
        }
        else if (Dir == 'R') {
            moveDown();
        }
    }
    // Case 12: walls above, right and left — passage opens downwards.
    else if (maze[curr_row-1][curr_col] == '#' && maze[curr_row][curr_col+1] == '#' && maze[curr_row][curr_col-1] == '#' && maze[curr_row+1][curr_col] == '.') {
        if (Dir == 'U') {
            moveDown();
        }
    }
    // Case 13: walls above, right and below — passage opens to the left.
    else if (maze[curr_row-1][curr_col] == '#' && maze[curr_row][curr_col+1] == '#' && maze[curr_row+1][curr_col] == '#' && maze[curr_row][curr_col-1] == '.') {
        if (Dir == 'R') {
            moveLeft();
        }
    }
    // Case 14: walls below, left and right — passage opens upwards.
    else if (maze[curr_row+1][curr_col] == '#' && maze[curr_row][curr_col+1] == '#' && maze[curr_row][curr_col-1] == '#' && maze[curr_row-1][curr_col] == '.') {
        if (Dir == 'D') {
            moveUp();
        }
    }
    // Case 15: walls right and left (vertical corridor) — keep going.
    else if (maze[curr_row ][curr_col-1] == '#' && maze[curr_row ][curr_col+1] == '#'&& maze[curr_row - 1][curr_col] == '.' && maze[curr_row + 1][curr_col] == '.') {
        if (Dir == 'R') {
            moveRight();
        }
        else if (Dir == 'L') {
            moveLeft();
        }
        else if (Dir == 'D') {
            moveDown();
        }
        else if (Dir == 'U') {
            moveUp();
        }
    }
    // Reaching the end (and restarting) is handled inside the move functions.
}
// Releases the dynamically allocated maze grid: one character array
// per row, then the array of row pointers itself.
MazeTraversal::~MazeTraversal() {
    for (int row = 0; row < this->size; row++)
        delete[] maze[row];
    delete[] maze;
}
//getter functions
int MazeTraversal::getCellWidthPx() {
return this->cellWidthPx;
}
int MazeTraversal::getCellHeightPx() {
return this->cellHeightPx;
}
int MazeTraversal::getSize() {
return this->size;
}
int MazeTraversal::getStartRow() {
return this->start_row;
}
int MazeTraversal::getStartCol() {
return this->start_col;
}
int MazeTraversal::getCurrRow() {
return this->curr_row;
}
int MazeTraversal::getCurrCol() {
return this->curr_col;
}
char MazeTraversal::getDir() {
return this->Dir;
}
// Returns the maze cell at (i, j) — '.' for a path, '#' for a wall.
// Out-of-range indices now return '#' (treated as a wall); the original
// fell off the end of the function, which is undefined behavior in C++.
char MazeTraversal::getCellValueAt(int i, int j) {
    if (i >= 0 && i < this->size) {
        if (j >= 0 && j < this->size) {
            return this->maze[i][j];
        }
    }
    return '#';
}
// Detects the ghost reaching the exit-side opening when no explicit end
// cell is set (end_row == -1): finds the lowest open cell in column
// size-2, and if the ghost is there (one column short of end_col),
// retargets the run back to the start cell and reports true.
bool MazeTraversal::noend() {
    int endrow = -1; // was uninitialized: UB if the column has no '.'
    for (int i = this->size - 2; i >= 1; i--) {
        if (this->maze[i][this->size - 2] == '.') {
            endrow = i;
            break;
        }
    }
    if (endrow != -1 && curr_col == end_col - 1 && end_row == -1 && curr_row == endrow) {
        // Retarget: the ghost must now travel back to where it started.
        end_row = start_row;
        end_col = start_col;
        return true;
    }
    return false;
}
// Homework5View.h : interface of the CHomework5View class
//
#pragma once
// MFC view for the maze game: draws the board, the pacman sprite and the
// cherries, and drives traversal both from the keyboard and
// automatically via a timer (see the .cpp implementation).
class CHomework5View : public CView
{
protected: // create from serialization only
	CHomework5View() noexcept;
	DECLARE_DYNCREATE(CHomework5View)

// Attributes
public:
	// Document owning the MazeTraversal instance.
	CHomework5Doc* GetDocument() const;

// Operations
public:

// Overrides
public:
	virtual void OnDraw(CDC* pDC); // overridden to draw this view
	virtual BOOL PreCreateWindow(CREATESTRUCT& cs);
protected:

// Implementation
public:
	virtual ~CHomework5View();
#ifdef _DEBUG
	virtual void AssertValid() const;
	virtual void Dump(CDumpContext& dc) const;
#endif

protected:

// Generated message map functions
protected:
	DECLARE_MESSAGE_MAP()
public:
	virtual void OnInitialUpdate();
	// Draws the pacman sprite at its current cell, facing its direction.
	void ImageFormation();
	// Draws the cherry marker(s) on the exit side / restart cell.
	void cheeryFormation();
	afx_msg void OnKeyDown(UINT nChar, UINT nRepCnt, UINT nFlags);
	afx_msg void OnTimer(UINT_PTR nIDEvent);
	// Shows the success dialog, then exits the process.
	void displaymessage();
	// Fires the timer handler once to start automation immediately.
	void calltimer();
};
#ifndef _DEBUG  // debug version in Homework5View.cpp
// Release build: plain cast with no runtime class check.
inline CHomework5Doc* CHomework5View::GetDocument() const
   { return reinterpret_cast<CHomework5Doc*>(m_pDocument); }
#endif
<file_sep>
// Homework5View.cpp : implementation of the CHomework5View class
//
#include "pch.h"
#include "framework.h"
#include <cstdlib>
// SHARED_HANDLERS can be defined in an ATL project implementing preview, thumbnail
// and search filter handlers and allows sharing of document code with that project.
#ifndef SHARED_HANDLERS
#include "Homework5.h"
#endif
#include "Homework5Doc.h"
#include "Homework5View.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#endif
// CHomework5View
IMPLEMENT_DYNCREATE(CHomework5View, CView)
BEGIN_MESSAGE_MAP(CHomework5View, CView)
ON_WM_KEYDOWN()
ON_WM_TIMER()
END_MESSAGE_MAP()
// CHomework5View construction/destruction
// Construction/destruction: no per-view state — everything the view
// needs lives in the document's MazeTraversal object.
CHomework5View::CHomework5View() noexcept
{
}

CHomework5View::~CHomework5View()
{
}
// Last chance to adjust the CREATESTRUCT (window class/styles) before
// the window is created; the framework defaults are used unchanged.
BOOL CHomework5View::PreCreateWindow(CREATESTRUCT& cs)
{
	return CView::PreCreateWindow(cs);
}
// CHomework5View drawing
// Paints the whole board: dot sprite for every path cell, wall sprite
// for every wall cell, then the cherries and the pacman sprite, and
// finally (re)arms the automation timer.
// Fix: the original new'd a CImage per cell and never deleted it, and
// reloaded the PNG from disk for every single cell on every paint.
// The sprites are now stack objects loaded once per paint.
void CHomework5View::OnDraw(CDC* pDC)
{
	CHomework5Doc* pDoc = GetDocument();
	ASSERT_VALID(pDoc);
	if (!pDoc)
		return;
	CImage dotImage;
	dotImage.Load(L"images\\pacmandot.png"); // images folder is placed in working directory
	CImage wallImage;
	wallImage.Load(L"images\\wall2.png");
	int size = pDoc->Mazeptr.getSize();
	int w = pDoc->Mazeptr.getCellWidthPx();
	int h = pDoc->Mazeptr.getCellHeightPx();
	for (int i = 0; i < size; i++) {
		for (int j = 0; j < size; j++) {
			if (pDoc->Mazeptr.getCellValueAt(i, j) == '.')
				dotImage.Draw(pDC->m_hDC, j * w, i * h, w, h);
			else if (pDoc->Mazeptr.getCellValueAt(i, j) == '#')
				wallImage.Draw(pDC->m_hDC, j * w, i * h, w, h);
		}
	}
	dotImage.Destroy();
	wallImage.Destroy();
	cheeryFormation();
	ImageFormation();
	// AUTOMATION: WM_TIMER (id 1, 600 ms) drives moveToNextCell().
	SetTimer(1, 600, NULL);
	calltimer();
}
// CHomework5View diagnostics
#ifdef _DEBUG
// Standard MFC validity check; delegates to the base class.
void CHomework5View::AssertValid() const
{
	CView::AssertValid();
}

// Dumps diagnostic state to the supplied dump context.
void CHomework5View::Dump(CDumpContext& dc) const
{
	CView::Dump(dc);
}

// Debug version of GetDocument: verifies the document's runtime class
// before casting (the non-debug version is inline in the header).
CHomework5Doc* CHomework5View::GetDocument() const // non-debug version is inline
{
	ASSERT(m_pDocument->IsKindOf(RUNTIME_CLASS(CHomework5Doc)));
	return (CHomework5Doc*)m_pDocument;
}
#endif //_DEBUG
// CHomework5View message handlers
// Called once after the view is first attached to its document.
// Resizes the frame window so the client area exactly fits the maze:
// size x size cells of cellWidthPx x cellHeightPx pixels.
void CHomework5View::OnInitialUpdate()
{
	CView::OnInitialUpdate();
	CHomework5Doc* pDoc = GetDocument();
	ASSERT_VALID(pDoc);
	if (!pDoc)
		return;
	CRect rcClient, rcWindow;
	GetClientRect(&rcClient);
	GetParentFrame()->GetWindowRect(&rcWindow);
	// Non-client overhead (borders, caption, menu, ...).
	int nWidthDiff = rcWindow.Width() - rcClient.Width();
	int nHeightDiff = rcWindow.Height() - rcClient.Height();
	// Target client size = maze pixel size; add the overhead back in.
	rcWindow.right = rcWindow.left + (pDoc->Mazeptr.getCellWidthPx()*pDoc->Mazeptr.getSize()) + nWidthDiff;
	rcWindow.bottom = rcWindow.top + (pDoc->Mazeptr.getCellHeightPx() * pDoc->Mazeptr.getSize()) + nHeightDiff;
	// MoveWindow applies the new frame rectangle.
	GetParentFrame()->MoveWindow(&rcWindow);
}
void CHomework5View::ImageFormation() {
CDC* pDC = GetDC();
CHomework5Doc* pDoc = GetDocument();
ASSERT_VALID(pDoc);
if (!pDoc)
return;
CImage* ghostimage = new CImage();
//right
if (pDoc->Mazeptr.getDir() == 'R') {
ghostimage->Load(L"images\\pacmanright.png"); //images folder is placed in working directory
ghostimage->Draw(pDC->m_hDC, pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
}
//left
else if (pDoc->Mazeptr.getDir() == 'L') {
ghostimage->Load(L"images\\pacmanleft.png"); //images folder is placed in working directory
ghostimage->Draw(pDC->m_hDC, pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
}
//down
else if (pDoc->Mazeptr.getDir() == 'D') {
ghostimage->Load(L"images\\pacmandown.png"); //images folder is placed in working directory
ghostimage->Draw(pDC->m_hDC, pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
}
//up
else if (pDoc->Mazeptr.getDir() == 'U') {
ghostimage->Load(L"images\\pacmanup.png"); //images folder is placed in working directory
ghostimage->Draw(pDC->m_hDC, pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
}
ghostimage->Destroy();
}
void CHomework5View::cheeryFormation() {
CDC* pDC = GetDC();
CHomework5Doc* pDoc = GetDocument();
ASSERT_VALID(pDoc);
if (!pDoc)
return;
for (int i = 0; i < pDoc->Mazeptr.getSize(); i++) {
int j = pDoc->Mazeptr.getSize() - 1;
if (pDoc->Mazeptr.getCellValueAt(i, j) == '.') {
CImage* cheeryimage = new CImage();
cheeryimage->Load(L"images\\cheery.png"); //images folder is placed in working directory
cheeryimage->Draw(pDC->m_hDC, j * pDoc->Mazeptr.getCellWidthPx(), i * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
cheeryimage->Destroy();
}
else if (pDoc->Mazeptr.noend() == true) {
CImage* cheeryimage = new CImage();
cheeryimage->Load(L"images\\cheery.png"); //images folder is placed in working directory
cheeryimage->Draw(pDC->m_hDC, pDoc->Mazeptr.getStartCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getStartRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx());
cheeryimage->Destroy();
}
}
}
// Manual control: R/L/U/D keys erase the sprite at the old cell, move
// the ghost, and redraw. Other keys fall through to the base handler.
// Fix: the four near-identical branches are collapsed, and the DC from
// GetDC() is now released (the original leaked it on every keypress).
void CHomework5View::OnKeyDown(UINT nChar, UINT nRepCnt, UINT nFlags){
	CHomework5Doc* pDoc = GetDocument();
	ASSERT_VALID(pDoc);
	if (!pDoc)
		return;
	if (nChar == 'R' || nChar == 'L' || nChar == 'U' || nChar == 'D') {
		CDC* pDC = GetDC();
		if (pDC) {
			// Black out the old cell before the sprite moves.
			pDC->FillSolidRect(pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx(), RGB(0, 0, 0));
			ReleaseDC(pDC);
		}
		switch (nChar) {
		case 'R': pDoc->Mazeptr.moveRight(); break;
		case 'L': pDoc->Mazeptr.moveLeft(); break;
		case 'U': pDoc->Mazeptr.moveUp(); break;
		case 'D': pDoc->Mazeptr.moveDown(); break;
		}
		cheeryFormation();
		ImageFormation();
	}
	if (pDoc->Mazeptr.DestinationReached() == true) {
		displaymessage();
	}
	CView::OnKeyDown(nChar, nRepCnt, nFlags);
}
// Automation tick (timer id 1): erases the sprite at the old cell, asks
// the maze for the next automatic step, redraws, and stops the timer
// once the destination is reached.
// Fix: the DC from GetDC() is now released (leaked every tick before).
void CHomework5View::OnTimer(UINT_PTR nIDEvent)
{
	if (nIDEvent == 1) {
		CHomework5Doc* pDoc = GetDocument();
		ASSERT_VALID(pDoc);
		if (!pDoc)
			return;
		CDC* pDC = GetDC();
		if (pDC) {
			// Black out the old cell before the sprite moves.
			pDC->FillSolidRect(pDoc->Mazeptr.getCurrCol() * pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCurrRow() * pDoc->Mazeptr.getCellHeightPx(), pDoc->Mazeptr.getCellWidthPx(), pDoc->Mazeptr.getCellHeightPx(), RGB(0, 0, 0));
			ReleaseDC(pDC);
		}
		pDoc->Mazeptr.moveToNextCell();
		cheeryFormation();
		ImageFormation();
		if (pDoc->Mazeptr.DestinationReached() == true) {
			KillTimer(1);
			displaymessage();
		}
	}
	CView::OnTimer(nIDEvent);
}
// Shows the success dialog and then terminates the process.
// NOTE(review): exit(0) bypasses the normal MFC shutdown path —
// presumably intentional for this homework; confirm before reuse.
void CHomework5View::displaymessage() {
	AfxMessageBox(_T("DESTINATION REACHED SUCCESSFULLY!"));
	exit(0);
}
// Invokes the timer handler once by hand so the automation starts
// immediately; subsequent ticks come from SetTimer() in OnDraw.
void CHomework5View::calltimer() {
	CHomework5Doc* pDoc = GetDocument();
	ASSERT_VALID(pDoc);
	if (!pDoc)
		return;
	OnTimer(1);
}
<file_sep># Automated-MazeTraversal
It is an automated version of a game in which a ghost trapped in a maze finds its path out.
1-DATA MEMBERS:
char** maze;
int curr_row;
int curr_col;
int start_row;
int start_col;
int end_row;
int end_col;
char Dir; //direction, it will be needed for automation
const int size; //initialize it in member initializer list of class constructor
// data for MFC graphics display
int cellWidthPx; //A good value can be 40 or 60
int cellHeightPx;
2-FUNCTIONS(PUBLIC)
MazeTraversal(int size = 12);
void LoadMaze(); // call in constructor
void moveUp();
void moveLeft();
void moveDown();
void moveRight();
bool DestinationReached();
void display();
void moveToNextCell(); // it will be needed for automation
~MazeTraversal();
3-GETTER FUNCTIONS
int getCellWidthPx();
int getCellHeightPx();
int getSize();
int getStartRow();
int getStartCol();
int getCurrRow();
int getCurrCol();
char getCellValueAt(int i, int j); // the returned character is either ‘.’ or ‘#’
Data Members:
The 2d array maze will hold the data of maze.
The starting position of the ghost is stored by start_row and start_col.
The current position of the ghost is stored by curr_row and curr_col. Initially it is the same as the start position.
The destination point of maze is stored by end_row and end_col;
Member Functions:
1. MazeTraversal(int size = 12);
The constructor initializes all the data members. Start and end cell values can be hard-coded, as they will be
in the maze provided. It also initializes the char** maze 2D array to size by size.
2. void LoadMaze();
Loads the content of “maze.txt” (provided) into 2D array maze. It must be called in constructor
MazeTraversal(). In the two-dimensional array, the hashes represent the walls of the maze and the dots
represent squares in the possible paths through the maze. Moves can be made only to a location in the
array that contains a dot.
3. bool DestinationReached();
Checks if current position of ghost is same as the destination point, and returns true. Otherwise false is
returned.
4. void moveUp(); void moveLeft(); void moveDown(); void moveRight();
Move functions update the current position of the ghost in the desired direction. If the current
position in the maze is (i, j), then the following movement possibilities occur in each function.
5. Getter Functions help access values of class data members in the View class
|
00002b30073f2e929a3ffc5ed5cee1b06a9752c2
|
[
"Markdown",
"C++"
] | 4 |
C++
|
Iqraa-Younas/Automated-MazeTraversal
|
1ab5cd90f9143b81931c7074e17157ea56de332a
|
3ac2c467be3f707fbc2f48a0561fc6722338ddac
|
refs/heads/master
|
<file_sep># Maintainer: <NAME> <<EMAIL>>
pkgname=firefox-gopassbridge
pkgver=0.1.1
pkgrel=1
pkgdesc='Gopass Bridge Firefox extension allows searching and inserting login credentials from the gopass password manager'
url=https://github.com/martinhoefling/gopassbridge
arch=('any')   # prebuilt .xpi, architecture independent
license=('MIT')
# Upstream-built xpi fetched straight from Mozilla's addon CDN.
source=("https://addons.cdn.mozilla.net/user-media/addons/852556/gopass_bridge-0.1.1-fx-linux.xpi")
noextract=("${source##*/}")   # install the xpi as-is; do not unzip it
sha256sums=('9aec7235b52609b983a8e9d116a5a0be93bf618e6ba18d9d51ad7db72fecd9f9')

package() {
	# Install the extension into Firefox's system-wide extension directory.
	install -Dm644 "${source##*/}" "$pkgdir"/usr/lib/firefox/browser/extensions/gopass_bridge-0.1.1-fx-linux.xpi
}

# vim:set ts=2 sw=2 et:
|
e358230df856fa93aac5a22a2f73ecac7079e398
|
[
"Shell"
] | 1 |
Shell
|
mnussbaum/firefox-gopassbridge
|
0bbc89d65097a7fc6f97ed7c3f7533dc29a91605
|
32e8ac93e098e53334dcb4bce96510a4423cb8d4
|
refs/heads/master
|
<repo_name>lhw373/bike<file_sep>/hello.php
<?php
// Simple demo script: prints two greeting strings.
echo 'hello word!sss';
echo 'what are you doing';
?><file_sep>/1.php
<?php
// Demo script printing a series of strings.
// Fix: several echo statements were missing their terminating
// semicolons, which is a PHP parse error (only the very last statement
// before `?>` may legally omit it). All statements now end with ';'.
echo 'aa';
echo 'bb';
echo 'cc';
echo 'dd';
echo 'ee';
echo 'ff';
echo 'gg';
echo 'hh';
echo 'ii';
echo 'll';
echo 'nn';
echo 'aaaa';
echo 'hello word!1111111';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
echo 'what are you doing';
echo 'hello word!';
?>
|
ff5ced26190bb82437048a7c5f8acb8998433233
|
[
"PHP"
] | 2 |
PHP
|
lhw373/bike
|
fc270b61db20e131212658efcdd25b081f3de349
|
58c06906b9e093d2fd4062cf457a814f721673b2
|
refs/heads/master
|
<file_sep>using System;
using System.IO;
using System.Text.RegularExpressions;
namespace lab_2.Model
{
public static class FileModel
{
public static void WriteFile(string path, string ciphertext, string sourcetext)
{
string content = "<ciphertext>" + ciphertext + "</ciphertext>" + Environment.NewLine + "<sourcetext>" + sourcetext + "</sourcetext>";
using (StreamWriter writer = new StreamWriter(path))
writer.Write(content);
}
public static void WriteFile(string path, string ciphertext)
{
string content = "<ciphertext>" + ciphertext + "</ciphertext>";
using (StreamWriter writer = new StreamWriter(path))
writer.Write(content);
}
public static bool TryReadArray(string path, out string cipher_text, out string source_text)
{
string file_content;
using (StreamReader reader = new StreamReader(path))
file_content = reader.ReadToEnd();
Match cipher_text_match = Regex.Match(file_content, "<ciphertext>(.*?)</ciphertext>");
Match source_text_match = Regex.Match(file_content, "<sourcetext>(.*?)</sourcetext>");
cipher_text = cipher_text_match.Groups[1].Value;
source_text = source_text_match.Groups[1].Value;
return source_text != string.Empty;
}
}
}
<file_sep>using lab_2.Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace lab_2.ViewModel
{
class CryptVM
{
ICipherModel coder = new AtbashModel();
private string TrueLettersOnle(string text)
{
string language = string.Empty;
string new_text = string.Empty;
if (Properties.Settings.Default.Language == "rus")
language = Letters.EnglishLetters;
else if (Properties.Settings.Default.Language == "eng")
language = Letters.RussianLetters;
foreach (var symbol in text)
if (!language.Contains(symbol.ToString().ToLower()))
new_text += symbol;
return new_text;
}
public string EncryptText(out string new_text, out string new_key, string text, string key)
{
new_text = TrueLettersOnle(text);
new_key = TrueLettersOnle(key);
if (Properties.Settings.Default.Сipher == "atbash")
coder = new AtbashModel();
else if (Properties.Settings.Default.Сipher == "vigenere")
coder = new VigenereModel();
return coder.Encode(new_text, new_key);
}
public string DecryptText(out string new_text, out string new_key, string text, string key)
{
new_text = TrueLettersOnle(text);
new_key = TrueLettersOnle(key);
if (Properties.Settings.Default.Сipher == "atbash")
coder = new AtbashModel();
else if (Properties.Settings.Default.Сipher == "vigenere")
coder = new VigenereModel();
return coder.Decode(new_text, new_key);
}
public bool ReadFile(string path, out string ciphertext, out string sourcetext) => FileModel.TryReadArray(path, out ciphertext, out sourcetext);
public void WriteFile(string path, string ciphertext, string sourcetext) => FileModel.WriteFile(path, ciphertext, sourcetext);
public void WriteFile(string path, string ciphertext) => FileModel.WriteFile(path, ciphertext);
public string HelloMessage() => HelloModel.Message;
}
}
<file_sep>using lab_2.Model;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Lab_2_Test
{
[TestClass]
public class lab_2_test
{
[TestMethod]
public void VigenereTestEngEncode()
{
lab_2.Properties.Settings.Default.Language = "eng";
string TestedText = "My heart and actions are utterly unclouded. They are all those of 'Justice'. You're experienced it, didnt you?";
string TestedKeyword = "Key";
string ExpectedText = "wc foepd eln eadmmxw ybi sdxcbpw eravssnib. dlci epo ejv xfywc yj 'hewrsgc'. iss'bi chtcbmcxgcn mr, nmbxx wyy?";
VigenereModel VG = new VigenereModel();
Assert.AreEqual(ExpectedText.ToLower(), VG.Encode(TestedText, TestedKeyword));
}
[TestMethod]
public void VigenereTestRusEncode()
{
lab_2.Properties.Settings.Default.Language = "rus";
string TestedText = "«Знаешь что, Карло? Последние десять лет я только убивал и всё. Я убивал за свою страну. Убивал за свою семью. Убивал каждого, кто переходил мне дорогу. Но это… Это для меня»";
string TestedKeyword = "ключ";
string ExpectedText = "«тщюэгё фйщ, цюзць? мёэчвъшфв ъпюъйе чвй и ялгецл клфычц ф ыир. й ршунюг тл ощщи ойълкк. ямёщкч еч энлф эрйсз. ыяамли вктбёнь, зйщ ъвзпблъуч йеп плзщор. ещ зпё… жял ъцй йэшй»";
VigenereModel VG = new VigenereModel();
Assert.AreEqual(ExpectedText.ToLower(), VG.Encode(TestedText, TestedKeyword));
}
public void VigenereTestEngDecode()
{
lab_2.Properties.Settings.Default.Language = "eng";
string TestedText = "wc foepd eln eadmmxw ybi sdxcbpw eravssnib. dlci epo ejv xfywc yj 'hewrsgc'. iss'bi chtcbmcxgcn mr, nmbxx wyy?";
string TestedKeyword = "Key";
string ExpectedText = "my heart and actions are utterly unclouded. they are all those of 'justice'. you're experienced it, didnt you?";
VigenereModel VG = new VigenereModel();
Assert.AreEqual(ExpectedText.ToLower(), VG.Decode(TestedText, TestedKeyword));
}
[TestMethod]
public void VigenereTestRusDecode()
{
lab_2.Properties.Settings.Default.Language = "rus";
string TestedText = "«тщюэгё фйщ, цюзць? мёэчвъшфв ъпюъйе чвй и ялгецл клфычц ф ыир. й ршунюг тл ощщи ойълкк. ямёщкч еч энлф эрйсз. ыяамли вктбёнь, зйщ ъвзпблъуч йеп плзщор. ещ зпё… жял ъцй йэшй»";
string TestedKeyword = "ключ";
string ExpectedText = "«знаешь что, карло? последние десять лет я только убивал и всё. я убивал за свою страну. убивал за свою семью. убивал каждого, кто переходил мне дорогу. но это… это для меня»";
VigenereModel VG = new VigenereModel();
Assert.AreEqual(ExpectedText.ToLower(), VG.Decode(TestedText, TestedKeyword));
}
[TestMethod]
public void AtbashTestEngEncode()
{
lab_2.Properties.Settings.Default.Language = "eng";
string TestedText = "What's the matter...? Are you simply going to watch? Are you forsaking him to save yourself?";
string ExpectedText = "dszg'h gsv nzggvi...? ziv blf hrnkob tlrmt gl dzgxs? ziv blf ulihzprmt srn gl hzev blfihvou?";
AtbashModel AB = new AtbashModel();
Assert.AreEqual(ExpectedText.ToLower(), AB.Encode(TestedText, null));
}
[TestMethod]
public void AtbashTestRusEncod()
{
lab_2.Properties.Settings.Default.Language = "rus";
string ExpectedText = "Сойдись со мной в поединке! - Сначала со своими сапогами сойдись... Недомерок...";
string TestedText = "нрхъцне нр тсрх ю прьъцсфь! - нсызыуы нр нюрцтц ныпрэытц нрхъцне... сьъртьорф...";
AtbashModel AB = new AtbashModel();
Assert.AreEqual(ExpectedText.ToLower(), AB.Encode(TestedText, null));
}
[TestMethod]
public void AtbashTestEngDecod()
{
lab_2.Properties.Settings.Default.Language = "eng";
string TestedText = "dszg'h gsv nzggvi...? ziv blf hrnkob tlrmt gl dzgxs? ziv blf ulihzprmt srn gl hzev blfihvou?";
string ExpectedText = "what's the matter...? are you simply going to watch? are you forsaking him to save yourself?";
AtbashModel AB = new AtbashModel();
Assert.AreEqual(ExpectedText.ToLower(), AB.Decode(TestedText, null));
}
[TestMethod]
public void AtbashTestRusDecod()
{
lab_2.Properties.Settings.Default.Language = "rus";
string ExpectedText = "нрхъцне нр тсрх ю прьъцсфь! - нсызыуы нр нюрцтц ныпрэытц нрхъцне... сьъртьорф...";
string TestedText = "сойдись со мной в поединке! - сначала со своими сапогами сойдись... недомерок...";
AtbashModel AB = new AtbashModel();
Assert.AreEqual(ExpectedText.ToLower(), AB.Decode(TestedText, null));
}
}
}<file_sep>using System.Linq;
namespace lab_2.Model
{
public class AtbashModel : ICipherModel
{
string rus_letters = Letters.RussianLetters;
string eng_letters = Letters.EnglishLetters;
string rev_rus_letters = new string(Letters.RussianLetters.Reverse().ToArray());
string rev_eng_letters = new string(Letters.EnglishLetters.Reverse().ToArray());
private string EncodeOrDecode(string text, string letters, string сipher)
{
text = text.ToLower();
string result_text = string.Empty;
for (int i = 0; i < text.Length; i++)
{
int index = letters.IndexOf(text[i]);
if (!char.IsLetter(text[i]))
result_text += text[i];
else if (index >= 0)
result_text += сipher[index].ToString();
}
return result_text;
}
public string Encode(string text, string keyword = "")
{
if (Properties.Settings.Default.Language == "rus")
return EncodeOrDecode(text, rus_letters, rev_rus_letters);
else if (Properties.Settings.Default.Language == "eng")
return EncodeOrDecode(text, eng_letters, rev_eng_letters);
return string.Empty;
}
public string Decode(string text, string keyword = "")
{
if (Properties.Settings.Default.Language == "rus")
return EncodeOrDecode(text, rev_rus_letters, rus_letters);
else if (Properties.Settings.Default.Language == "eng")
return EncodeOrDecode(text, rev_eng_letters, eng_letters);
return string.Empty;
}
}
}
<file_sep>using System;
namespace lab_2.Model
{
public class VigenereModel : ICipherModel
{
string rus_letters = Letters.RussianLetters;
string eng_letters = Letters.EnglishLetters;
string letters;
public string Encode(string text, string Keyword)
{
if (Keyword == string.Empty)
return string.Empty;
if (Properties.Settings.Default.Language == "rus")
letters = rus_letters;
else if (Properties.Settings.Default.Language == "eng")
letters = eng_letters;
int LettersLength = letters.Length;
text = text.ToLower();
Keyword = Keyword.ToLower();
string ResultedText = string.Empty;
int index = 0;
foreach (char el in text)
{
if (!char.IsLetter(el))
{
ResultedText += el;
continue;
}
int сurrent_сharacter = (Array.IndexOf(letters.ToCharArray(), el) +
Array.IndexOf(letters.ToCharArray(), Keyword[index])) % LettersLength;
ResultedText += letters[сurrent_сharacter];
if ((index + 1) == Keyword.Length)
index = 0;
else
index++;
}
return ResultedText;
}
public string Decode(string Text, string Keyword)
{
if (Properties.Settings.Default.Language == "rus")
letters = rus_letters;
else if (Properties.Settings.Default.Language == "eng")
letters = eng_letters;
int LettersLength = letters.Length;
Text = Text.ToLower();
Keyword = Keyword.ToLower();
string ResultedText = "";
int KeywordIndex = 0;
foreach (char Symbol in Text)
{
if (!char.IsLetter(Symbol))
{
ResultedText += Symbol;
continue;
}
int CurrentCharacter = (Array.IndexOf(letters.ToCharArray(), Symbol) + LettersLength -
Array.IndexOf(letters.ToCharArray(), Keyword[KeywordIndex])) % LettersLength;
ResultedText += letters[CurrentCharacter];
if ((KeywordIndex + 1) == Keyword.Length)
KeywordIndex = 0;
else
KeywordIndex++;
}
return ResultedText.ToLower();
}
}
}
<file_sep>
namespace lab_2.Model
{
    // Common contract implemented by the text ciphers (Atbash, Vigenère).
    interface ICipherModel
    {
        // Encrypts input_text; keyless ciphers ignore the key argument.
        string Encode(string input_text, string key);
        // Decrypts input_text using the same key that encoded it.
        string Decode(string input_text, string key);
    }
}
<file_sep>
namespace lab_2.Model
{
    // Alphabets used by the cipher implementations. Note the Russian
    // string uses a project-specific ordering (ь/ъ before э, ы last).
    static class Letters
    {
        public static string RussianLetters { get { return "абвгдеёжзийклмнопрстуфхцчшщьъэюяы"; } }
        public static string EnglishLetters { get { return "abcdefghijklmnopqrstuvwxyz"; } }
    }
}<file_sep>using System;
namespace lab_2.Model
{
    // Supplies the multi-line "about" text (lab description and author)
    // shown by the main window's hello/about dialogs.
    static class HelloModel
    {
        public static string Message
        {
            get
            {
                return "Lab_2" + Environment.NewLine +
                "Лабораторная работа №2" + Environment.NewLine +
                "Методы шифрования" + Environment.NewLine +
                "Написать программу для шифрования" + Environment.NewLine +
                "текста двумя методами: " + Environment.NewLine +
                "и Виженер" + Environment.NewLine +
                "Студент группы 484" + Environment.NewLine +
                "<NAME>" + Environment.NewLine +
                "2020 год";
            }
        }
    }
}
<file_sep>using lab_2.ViewModel;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Forms;
using MessageBox = System.Windows.Forms.MessageBox;
using Path = System.IO.Path;
using System;
namespace lab_2
{
public partial class MainWindow : Window
{
        // View-model performing the actual ciphering and file I/O.
        CryptVM VM = new CryptVM();
        // Which sections to persist when saving to a file.
        enum WriteSettings { SourceAndCipher, Source, Cipher }

        public MainWindow()
        {
            InitializeComponent();
        }
        // Re-runs the active cipher over the plain-text box (encode or
        // decode per the radio buttons) and mirrors the cleaned text/key
        // back into the input boxes (foreign-alphabet letters stripped).
        private void DoCoding()
        {
            string text = string.Empty;
            string key = string.Empty;
            if (rb_encode.IsChecked.Value)
                tb_cipher_text.Text = VM.EncryptText(out text, out key, tb_plain_text.Text, tb_key.Text);
            else
                tb_cipher_text.Text = VM.DecryptText(out text, out key, tb_plain_text.Text, tb_key.Text);
            tb_plain_text.Text = text;
            tb_key.Text = key;
            // Keep the caret at the end after the programmatic rewrite.
            tb_key.CaretIndex = tb_key.Text.Length;
            tb_plain_text.CaretIndex = tb_plain_text.Text.Length;
        }

        // Live re-encode while the user types.
        private void TextBox_TextChanged(object sender, TextChangedEventArgs e)
        {
            DoCoding();
        }
        // "Open file" handler: lets the user pick a .txt file and loads its
        // ciphertext/sourcetext sections into the two text boxes. Files
        // without a source-text section are rejected with a message box.
        private void ReadFileClick(object sender, RoutedEventArgs e)
        {
            string file_path = string.Empty;
            string file_name = string.Empty;
            using (OpenFileDialog openFileDialog = new OpenFileDialog())
            {
                openFileDialog.InitialDirectory = "c:\\";
                openFileDialog.Filter = "txt files (*.txt)|*.txt|All files (*.*)|*.*";
                openFileDialog.FilterIndex = 1;
                openFileDialog.RestoreDirectory = true;
                if (openFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                {
                    file_path = openFileDialog.FileName;
                    file_name = Path.GetFileName(file_path);
                }
            }
            string ciphertext = string.Empty;
            string sourcetext = string.Empty;
            if (file_path != string.Empty)
            {
                if (VM.ReadFile(file_path, out ciphertext, out sourcetext))
                {
                    tb_plain_text.Text = sourcetext;
                    tb_cipher_text.Text = ciphertext;
                }
                else
                    MessageBox.Show($"Файл \"{file_name}\" не содержит исходный текст");
            }
        }
        // Saves the current text through a SaveFileDialog; write_settings
        // selects which sections (source / cipher / both) are written.
        // Refuses to save when the plain-text box is empty.
        private void WriteText(WriteSettings write_settings)
        {
            string file_path = string.Empty;
            string file_name = string.Empty;
            if (tb_plain_text.Text == string.Empty)
            {
                MessageBox.Show("Записать пустоту? Я не умею");
                return;
            }
            using (SaveFileDialog saveFileDialog1 = new SaveFileDialog())
            {
                saveFileDialog1.Filter = "txt files (*.txt)|*.txt|All files (*.*)|*.*";
                saveFileDialog1.FilterIndex = 1;
                saveFileDialog1.RestoreDirectory = true;
                if (saveFileDialog1.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                {
                    if (!string.IsNullOrEmpty(file_path = saveFileDialog1.FileName))
                    {
                        file_name = Path.GetFileName(file_path);
                        if(write_settings == WriteSettings.SourceAndCipher)
                            VM.WriteFile(file_path, tb_cipher_text.Text, tb_plain_text.Text);
                        else if (write_settings == WriteSettings.Cipher)
                            VM.WriteFile(file_path, tb_cipher_text.Text);
                        else
                            VM.WriteFile(file_path, string.Empty, tb_plain_text.Text);
                        MessageBox.Show($"Файл \"{file_name}\" успешно записан", file_name);
                    }
                }
            }
        }
// Menu handler: write both the source and the cipher text to a file.
private void WriteSourceAndCipherTextClick(object sender, RoutedEventArgs e)
{
WriteText(WriteSettings.SourceAndCipher);
}
// Menu handler: write only the source text to a file.
private void WriteSourceTextClick(object sender, RoutedEventArgs e)
{
WriteText(WriteSettings.Source);
}
// Menu handler: write only the cipher text to a file.
private void WriteCipherTextClick(object sender, RoutedEventArgs e)
{
WriteText(WriteSettings.Cipher);
}
// Shows the "about" dialog and persists whether the user wants to see it
// again on startup (Properties.Settings.Default.HelloShow).
// Extracted so HelloMessageShow and HelloWindow no longer duplicate it.
private void ShowHelloDialog()
{
    var result = MessageBox.Show(VM.HelloMessage() + Environment.NewLine + "Показывать это окно в дальнейшем?", "О программе", MessageBoxButtons.YesNo, MessageBoxIcon.Information);
    Properties.Settings.Default.HelloShow = result == System.Windows.Forms.DialogResult.Yes;
    Properties.Settings.Default.Save();
}
// Startup handler: only shows the dialog if the user has not opted out.
private void HelloMessageShow(object sender, RoutedEventArgs e)
{
    if (Properties.Settings.Default.HelloShow)
        ShowHelloDialog();
}
// Menu handler: always shows the dialog.
private void HelloWindow(object sender, RoutedEventArgs e)
{
    ShowHelloDialog();
}
// Persists the current UI selections (language and cipher type) to the
// application settings and re-runs the coding with the new configuration.
private void ApplySettings()
{
// Guard: during InitializeComponent the controls may not exist yet.
if (rb_atbash == null || rb_encode == null || tb_cipher_text == null || tb_key == null)
return;
if (rb_eng.IsChecked.Value)
Properties.Settings.Default.Language = "eng";
else if (rb_rus.IsChecked.Value)
Properties.Settings.Default.Language = "rus";
// NOTE(review): the settings property below starts with a Cyrillic 'С'
// (U+0421), not a Latin 'C' — it must match the declaration in Settings.
if (rb_atbash.IsChecked.Value)
Properties.Settings.Default.Сipher = "atbash";
else if (rb_vigenere.IsChecked.Value)
Properties.Settings.Default.Сipher = "vigenere";
Properties.Settings.Default.Save();
DoCoding();
}
// The handlers below all funnel into ApplySettings(), which persists the
// current UI state to the settings store and re-runs the cipher.
private void tb_key_TextChanged(object sender, TextChangedEventArgs e)
{
ApplySettings();
}
private void rb_rus_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
private void rb_eng_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
private void rb_vigenere_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
private void rb_atbash_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
private void rb_encode_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
private void rb_decode_Checked(object sender, RoutedEventArgs e)
{
ApplySettings();
}
}
}
|
c9972ba4ae5646462c1cbbd6984556bbf83a2cdd
|
[
"C#"
] | 9 |
C#
|
garinich777/lab_2
|
f1cffdc002302484d7d7deea64ef7ee30f474427
|
b9d3b809d814d73cc5dd63cb1c55f0478b0368d9
|
refs/heads/master
|
<repo_name>tonytamsf/AndroidSimpleTwitterClient<file_sep>/src/com/codepath/apps/restclienttemplate/LoginActivity.java
package com.codepath.apps.restclienttemplate;
import java.util.ArrayList;
import org.json.JSONArray;
import org.json.JSONObject;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import com.codepath.apps.restclienttemplate.models.Tweet;
import com.codepath.oauth.OAuthLoginActivity;
import com.loopj.android.http.JsonHttpResponseHandler;
// Login screen: drives the Twitter OAuth flow. On success it fetches the
// home timeline through the REST client and forwards the raw JSON to
// TimelineActivity via an Intent extra.
public class LoginActivity extends OAuthLoginActivity<TwitterClient> {
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.d("DEBUG", "onCreate");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
}
// Inflate the menu; this adds items to the action bar if it is present.
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.login, menu);
return true;
}
// OAuth authenticated successfully, launch primary authenticated activity
// i.e Display application "homepage"
@Override
public void onLoginSuccess() {
Log.d("DEBUG", "onLoginSuccess");
getClient().getHomeTimeline(new JsonHttpResponseHandler() {
/* (non-Javadoc)
* @see com.loopj.android.http.JsonHttpResponseHandler#onSuccess(org.json.JSONArray)
*/
// Timeline fetched: pass the serialized JSON array of tweets on.
// NOTE(review): this onSuccess(JSONArray) overload is from an old
// loopj android-async-http API — confirm against the bundled version.
@Override
public void onSuccess(JSONArray jsonTweets) {
//jsonTweets.get(0).getString("text");
//jsonTweets.get(0).getJSONObject("user");
Log.d("DEBUG", jsonTweets.toString());
// Move on to TimelineActivity
Intent i = new Intent(getApplicationContext(),
TimelineActivity.class);
i.putExtra("jsonString", jsonTweets.toString());
startActivity(i);
}
// Timeline fetch failed: currently just delegates to the default handler.
@Override
public void onFailure(Throwable arg0, JSONObject arg1) {
super.onFailure(arg0, arg1);
}
});
}
// OAuth authentication flow failed, handle the error
// i.e Display an error dialog or toast
@Override
public void onLoginFailure(Exception e) {
Log.d("DEBUG", "onLoginFailure");
e.printStackTrace();
}
// Click handler method for the button used to start OAuth flow
// Uses the client to initiate OAuth authorization
// This should be tied to a button used to login
public void loginToRest(View view) {
getClient().connect();
}
}
|
177fdf72a3b03a5ae58f59aecad2ff6be1256815
|
[
"Java"
] | 1 |
Java
|
tonytamsf/AndroidSimpleTwitterClient
|
96f9e077ca2ddc98b39d0cc5e03f86655b9349e4
|
325ad4c5f6793069ecbc70be4a87d3a118ad0f40
|
refs/heads/master
|
<repo_name>AfonsohsS/CloudKitDash<file_sep>/CloudKitDash/Controllers/CitiesTVController.swift
//
// CitiesTVController.swift
// CloudKitDash
//
// Created by <NAME> on 25/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// All the code in this project comes from the book "iOS Apps for Masterminds"
// You can learn more about it at http://www.formasterminds.com
import UIKit
import CloudKit
import CoreData
// Table view listing the cities of the selected country. Rows are fed by an
// NSFetchedResultsController over Core Data; deletions are mirrored to
// CloudKit via a CKDelete marker object.
class CitiesTVController: UITableViewController, NSFetchedResultsControllerDelegate {
// Core Data context, taken from the AppDelegate in viewDidLoad.
var context: NSManagedObjectContext!
//The viewControllers defines this fetch object to get the objects from the
//persistent store and feed the table view
var fetchedController: NSFetchedResultsController<Cities>!
// Set by the presenting controller before this view loads.
var selectedCountry: Countries!
override func viewDidLoad() {
super.viewDidLoad()
///TODO: find a better way to obtain the Core Data context than
///reaching into the AppDelegate.
let app = UIApplication.shared
let appDelegate = app.delegate as! AppDelegate
context = appDelegate.context
if selectedCountry != nil {
let request: NSFetchRequest<Cities> = Cities.fetchRequest()
request.predicate = NSPredicate(format: "country = %@", selectedCountry)
let sort = NSSortDescriptor(key: "name", ascending: true)
request.sortDescriptors = [sort]
fetchedController = NSFetchedResultsController(fetchRequest: request, managedObjectContext: context, sectionNameKeyPath: nil, cacheName: nil)
fetchedController.delegate = self
do {
try fetchedController.performFetch()
} catch {
print("Error Fetching Data in CitiesTVController viewDidLoad()")
}
}
}
// Bar button: shows an alert asking for the name of a new city.
@IBAction func addCity(_ sender: UIBarButtonItem) {
present(AppData.setAlert(type: "Cities", title: "Insert City", style: .alert, message: "Add a new city to the list", selectedCountry: self.selectedCountry), animated: true)
}
// Bar button: toggles the table's editing (delete) mode.
@IBAction func editCity(_ sender: UIBarButtonItem) {
if tableView.isEditing {
tableView.setEditing(false, animated: true)
} else {
tableView.setEditing(true, animated: true)
}
}
//Method Prepare to go to PictureVC
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "showPicture" {
let vc = segue.destination as! PictureViewController
if let indexParth = self.tableView.indexPathForSelectedRow {
let city = fetchedController.object(at: indexParth)
vc.selectedCity = city
}
}
}
// MARK: - Table view data source
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if let sections = fetchedController.sections {
let sectionInfo = sections[section]
return sectionInfo.numberOfObjects
}
return 0
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "citiesCell", for: indexPath)
let city = fetchedController.object(at: indexPath)
cell.textLabel?.text = city.name
return cell
}
// NSFetchedResultsControllerDelegate: mirror store changes into the table.
func controller(_ controller: NSFetchedResultsController<NSFetchRequestResult>, didChange anObject: Any, at indexPath: IndexPath?, for type: NSFetchedResultsChangeType, newIndexPath: IndexPath?) {
switch type {
case .delete:
if let path = indexPath {
tableView.deleteRows(at: [path], with: .fade)
}
case .insert:
if let path = newIndexPath {
tableView.insertRows(at: [path], with: .fade)
}
case .update:
if let path = indexPath {
let cell = tableView.cellForRow(at: path)
let city = fetchedController.object(at: path)
cell?.textLabel?.text = city.name
}
default:
break
}
}
func controllerWillChangeContent(_ controller: NSFetchedResultsController<NSFetchRequestResult>) {
tableView.beginUpdates()
}
func controllerDidChangeContent(_ controller: NSFetchedResultsController<NSFetchRequestResult>) {
tableView.endUpdates()
}
// Swipe-to-delete: queues the CloudKit deletion and removes the local object.
override func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCell.EditingStyle, forRowAt indexPath: IndexPath) {
if editingStyle == .delete {
let city = fetchedController.object(at: indexPath)
//Add a CKDelete object to the persistent store with the information about the record
//we have to delete from CloudKit, and then delete the Countries object from Core Data
let newItem = CKDelete(context: context)
newItem.zoneName = "listPlaces"
newItem.recordName = city.ckRecordName
context.delete(city)
do {
try self.context.save()
AppData.removeRecords()
} catch {
print("Error Deleting City")
}
tableView.setEditing(false, animated: true)
}
}
}
<file_sep>/CloudKitDash/Controllers/PictureViewController.swift
//
// PictureViewController.swift
// CloudKitDash
//
// Created by <NAME> on 26/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// All the code in this project comes from the book "iOS Apps for Masterminds"
// You can learn more about it at http://www.formasterminds.com
import UIKit
import CloudKit
// Shows the picture stored for the selected city (binary image data kept
// in the Cities object's `picture` attribute).
class PictureViewController: UIViewController {
@IBOutlet weak var cityPicture: UIImageView!
// Set by CitiesTVController in prepare(for:sender:) before the segue.
var selectedCity: Cities!
override func viewDidLoad() {
super.viewDidLoad()
if selectedCity != nil {
if let picture = selectedCity.picture {
cityPicture.image = UIImage(data: picture)
}
}
}
}
<file_sep>/CloudKitDash/Models/CloudErrors.swift
//
// CloudErrors.swift
// CloudKitDash
//
// Created by <NAME> on 30/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import CloudKit
// Central handler for CKError values returned by CloudKit operations.
// Updates user defaults, retries failed uploads, or just logs, depending
// on the error code.
class CloudErrors {
static func processErrors(error: CKError) {
let userSettings = UserDefaults.standard
switch error.code {
case .notAuthenticated:
// No iCloud account: remember it so other code skips server calls.
userSettings.set(false, forKey: "iCloudAvailable")
case .changeTokenExpired:
//Remove tokens values from userDefaults
userSettings.removeObject(forKey: "changeToken")
userSettings.removeObject(forKey: "changeZoneToken")
//Execute checkUpdates() after 30 seconds
// NOTE(review): #selector requires an @objc method; checkUpdates(finishClosure:)
// takes a closure argument, so verify this selector actually resolves at runtime.
Timer.scheduledTimer(timeInterval: 30, target: self, selector: #selector(AppData.checkUpdates(finishClosure:)), userInfo: nil, repeats: false)
case .quotaExceeded:
///Warn the user by an Alert
print("There is no space in your iCloud Account")
/*
The way errors provide additional information is through a dictionary assigned to
their userInfo property (an NSDictionary object). To get the records that failed,
we have to read the value in this dictionary with the key CKPartialErrorsByItemIDKey.
In turn, this key returns another NSDictionary object with a list of the record IDs of
the records that failed and the corresponding error each operation returned.
*/
case .partialFailure:
if let listErrors = error.userInfo[CKPartialErrorsByItemIDKey] as? NSDictionary {
var listFailedRecords: [CKRecord] = []
for (_, error) in listErrors {
if let recordError = error as? CKError {
//The process of uploading a record may failed for different reasons.
//If we want to get the records that failed because of a tag mismatch,
//we have to read the CKRecordChangedErrorServerRecordKey key of the userInfo
//dictionary of each error.
if let record = recordError.userInfo[CKRecordChangedErrorServerRecordKey] as? CKRecord {
listFailedRecords.append(record)
}
}
}
AppData.uploadFailedRecords(failedRecords: listFailedRecords)
}
case .networkFailure:
///Warn the user by an Alert
print("Network Failure")
case .networkUnavailable:
///Warn the user by an Alert
print("Network Unavailable")
default:
print("Error: \(error.code)")
break
}
}
}
<file_sep>/CloudKitDash/Controllers/Old/OldCitiesTVController.swift
//
// OldCitiesTVController.swift
// CloudKitDash
//
// Created by <NAME> on 25/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// All the code in this project comes from the book "iOS Apps for Masterminds"
// You can learn more about it at http://www.formasterminds.com
//import UIKit
//
//class OldCitiesTVController: UITableViewController {
//
// override func viewDidLoad() {
// super.viewDidLoad()
//
// //Observer to Update Interface to call updateInterface()
// let notCenter = NotificationCenter.default
// let name = Notification.Name("Update Interface")
// notCenter.addObserver(self, selector: #selector(updateInterface(notification:)), name: name, object: nil)
//
// //Read and show the values when the view is loaded
// //The method performs a query on the database to get all the countries
// //available and posts a notification when it is over
// AppData.readCities()
//
// //Note: This is why we register the observer for the notification before calling the method
//
// }
//
// @objc func updateInterface(notification: Notification) {
// tableView.reloadData()
// }
//
// @IBAction func addCity(_ sender: UIBarButtonItem) {
// present(AppData.setAlert(type: "City", title: "Insert City", style: .alert, message: "Add a new city to the list"), animated: true)
// }
//
// @IBAction func editCity(_ sender: UIBarButtonItem) {
//
// }
//
//
// //Method Prepare to go to PictureVC
// override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// if segue.identifier == "showPicture" {
// if let indexParth = self.tableView.indexPathForSelectedRow {
// let vc = segue.destination as! PictureViewController
// vc.selectedCity = AppData.listCities[indexParth.row]
// }
// }
// }
//
//
// // MARK: - Table view data source
//
// override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
// // #warning Incomplete implementation, return the number of rows
// return AppData.listCities.count
// }
//
//
// override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
// let cell = tableView.dequeueReusableCell(withIdentifier: "citiesCell", for: indexPath)
//
// let record = AppData.listCities[indexPath.row]
//
// if let name = record["cityName"] as? String {
// cell.textLabel?.text = name
// }
//
// return cell
// }
//
//}
<file_sep>/CloudKitDash/Resources/AppDelegate.swift
//
// AppDelegate.swift
// CloudKitDash
//
// Created by <NAME> on 25/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// All the code in this project comes from the book "iOS Apps for Masterminds"
// You can learn more about it at http://www.formasterminds.com
import UIKit
import CloudKit
import CoreData
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
var container: NSPersistentContainer!
var context: NSManagedObjectContext!
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
/*
Note: Subscriptions only report changes in customs zones.
Therefore, if we want to receive notifications, besidescreating the
subscription we also have to create a record zone and store all our
records in it. This is why, the first thing we do when the application
is launched, is to store two Boolean values in the User Defaults database
called "subscriptionSaved", "zoneCreated" and "iCloudAvailable". These
values will be used later to know whether or not we have already created
the subscription, the custom zone and iCloud Account is available.
*/
let userSettings = UserDefaults.standard
let values = ["subscriptionSaved" : false, "zoneCreated" : false, "iCloudAvailable" : false]
userSettings.register(defaults: values)
//Setting CoreData's container and context
container = NSPersistentContainer(name: "Places")
container.loadPersistentStores { (storeDescription, error) in
if error != nil {
print("Error loading data")
} else {
self.context = self.container.viewContext
}
}
//Register the application with icloud servers
application.registerForRemoteNotifications()
/*
Every time we want to perform an operation on the servers, we can check the value
of the iCloudAvailable key to know whether an iCloud account is available.
*/
let containerCloudKit = CKContainer.default()
containerCloudKit.accountStatus { (status, error) in
//If iCloud Account is available, check for Updates
if status == CKAccountStatus.available {
let mainQueue = OperationQueue.main
mainQueue.addOperation {
userSettings.set(true, forKey: "iCloudAvailable")
AppData.checkUpdates(finishClosure: { (result) in
return
})
}
} else {
print("Error Cloud Connection")
}
}
AppData.configureDatabase {}
return true
}
func application(_ application: UIApplication, didReceiveRemoteNotification userInfo: [AnyHashable : Any], fetchCompletionHandler completionHandler: @escaping (UIBackgroundFetchResult) -> Void) {
//Check whether the notification received is of type CKDatabaseNotification, sent by a CloudKit server,
//creating a CKNotification
let notification = CKNotification(fromRemoteNotificationDictionary: userInfo) as? CKDatabaseNotification
/*
Note: We call the closure to tell the system that the process is over,
but because the operations are performed asynchronously, we can't do it
until they are finished. This way we send a closure to the method we use
to download the new information which sole purpose is to execute the
completionHandler closure with the value "result" returned by the operations.
*/
if notification != nil {
AppData.checkUpdates { (result) in
let mainQueue = OperationQueue.main
mainQueue.addOperation {
//Tell to excute closure only with process is over.
//Por isso usamos o result na sua execução.
completionHandler(result)
}
}
}
}
}
<file_sep>/README.md
# CloudKitDash
This application is an example of using CloudKit's basic classes and methods.
If you want more details about this example or want to know more about CloudKit,
you can find more information at http://www.formasterminds.com
iOS Apps for Masterminds
4th Edition
ISBN: 978-1724466440
<file_sep>/CloudKitDash/Models/ApplicationData.swift
//
// ApplicationData.swift
// CloudKitDash
//
// Created by <NAME> on 25/05/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// All the code in this project comes from the book "iOS Apps for Masterminds"
// You can learn more about it at http://www.formasterminds.com
import UIKit
import CloudKit
import CoreData
//Enum to the Entities Name
enum Type: String {
case Countries
case Cities
}
class ApplicationData {
//MARK: - Property to store the data locally
///Store a reference to the CloudKit's database
var database: CKDatabase!
///It's the reference to knbow which country has been selected by the user at any given moment
var selectedCountry: CKRecord.ID!
///Array with the list of the countries already inserted in the database
var listCountries: [CKRecord] = []
///Array with the list of the cities available for a specific country
var listCities: [CKRecord] = []
///Reference to the CoreData Context
var context: NSManagedObjectContext!
// Grabs the shared Core Data context from the AppDelegate and opens the
// user's private CloudKit database.
init() {
let app = UIApplication.shared
let appDelegate = app.delegate as! AppDelegate
context = appDelegate.context
//Get the container from PrivateCloudDatabase
let container = CKContainer.default()
database = container.privateCloudDatabase
}
//MARK: - Model Functions to Insert Data
///Method to insert a new Country (Avaliar se Continua)
// Inserts a new Countries object for `name`, prepares its CloudKit record
// metadata in the "listPlaces" zone, saves the context and triggers upload.
func insertCountry(name: String) {
let text = name.trimmingCharacters(in: .whitespaces)
// NOTE(review): the trimmed `text` is only used for validation; the
// untrimmed `name` is what gets stored — confirm this is intentional.
if !text.isEmpty {
let newCountry = Countries(context: self.context)
newCountry.name = name
// Flag for the sync engine: this record still has to be uploaded.
newCountry.ckUpload = true
let recordName = "idcountry-\(UUID())"
//As we know subscriptions require the records to be stored in a custom zone.
// So we have to create one.
let zone = CKRecordZone(zoneName: "listPlaces")
//Create a unique value ID for the Countries in a specific custom zone
let id = CKRecord.ID(recordName: recordName, zoneID: zone.zoneID)
//Create a record object of type Countries
let record = CKRecord(recordType: Type.Countries.rawValue, recordID: id)
let coder = NSKeyedArchiver(requiringSecureCoding: true)
record.encodeSystemFields(with: coder)
//Every time the user inserts a new country, we have to create a temporary CKRecord object
//and encode its metadata with an NSKeyedArchiver object.
let metadata = coder.encodedData
newCountry.ckMetadata = metadata
newCountry.ckRecordName = recordName
do {
try self.context.save()
self.uploadRecords()
} catch {
print("Error Saving New Country")
}
}
}
///Method to insert a new City (Avaliar se Continua)
///Inserts a new Cities object named `name` under `selectedCountry`.
///Creates the Core Data object with a placeholder picture, encodes the
///CloudKit record metadata for the "listPlaces" zone, saves the context
///and triggers the upload of pending records.
func insertCity(name: String, selectedCountry: Countries) {
    let text = name.trimmingCharacters(in: .whitespaces)
    if !text.isEmpty {
        let newCity = Cities(context: self.context)
        newCity.name = name
        newCity.country = selectedCountry
        // Flags for the sync engine: the record and its picture still
        // have to be uploaded to CloudKit.
        newCity.ckUpload = true
        newCity.ckPicture = true
        newCity.ckReference = selectedCountry.ckRecordName
        // Placeholder picture bundled with the app.
        if let picture = UIImage(named: "Vancouver.jpg") {
            newCity.picture = picture.pngData()
        }
        let recordName = "idcity-\(UUID())"
        //Subscriptions require the records to be stored in a custom zone,
        //so the record is created inside the "listPlaces" zone.
        let zone = CKRecordZone(zoneName: "listPlaces")
        //Create a unique record ID for the city in that zone.
        let id = CKRecord.ID(recordName: recordName, zoneID: zone.zoneID)
        //Create a record object of type Cities and keep its metadata so
        //the server can later match this record against uploads.
        let record = CKRecord(recordType: Type.Cities.rawValue, recordID: id)
        let coder = NSKeyedArchiver(requiringSecureCoding: true)
        record.encodeSystemFields(with: coder)
        let metadata = coder.encodedData
        newCity.ckMetadata = metadata
        newCity.ckRecordName = recordName
        do {
            try self.context.save()
            self.uploadRecords()
        } catch {
            // Fixed: this message previously said "Country" in the City path.
            print("Error Saving New City")
        }
    }
}
//MARK: - Model Functions to Read Data
///Mehtod to read Countries
// Queries CloudKit for every Countries record, refills `listCountries`,
// and posts the interface-update notification when done.
func readCountries() {
//Ask the server to only look for Countries
//TRUEPREDICATE keyword determines that the predicate will always return
//the value true, so we get back all the records available
let predicate = NSPredicate(format: "TRUEPREDICATE")
let query = CKQuery(recordType: Type.Countries.rawValue, predicate: predicate)
database.perform(query, inZoneWith: nil) { (records, error) in
if error != nil {
print("ERROR: Registro de Country não encontrado")
} else if let list = records {
//If there is no error, add the records received to the Array
self.listCountries = []
for record in list {
self.listCountries.append(record)
}
self.updateInterface()
}
}
}
///Method to read Cities
// Queries CloudKit for the cities belonging to `selectedCountry`,
// refills `listCities`, and posts the interface-update notification.
func readCities() {
//Getting the list of cities that belong to the country selected by the user
if selectedCountry != nil {
// NOTE(review): assumes the server-side "country" field is a Reference
// that matches this CKRecord.ID — confirm against the record schema.
let predicate = NSPredicate(format: "country = %@", selectedCountry)
let query = CKQuery(recordType: Type.Cities.rawValue, predicate: predicate)
database.perform(query, inZoneWith: nil) { (records, error) in
if error != nil {
print("ERROR: Registro de City não encontrado")
} else if let list = records {
self.listCities = []
for record in list {
self.listCities.append(record)
}
self.updateInterface()
}
}
}
}
//MARK: - Update Interface
///The purpose of this method is to tell the rest of the application that new information is available and should be shown to the user.
///Broadcasts the "Update Interface" notification on the main queue so that
///any view controller observing it can refresh what it shows to the user.
func updateInterface() {
    OperationQueue.main.addOperation {
        NotificationCenter.default.post(name: Notification.Name("Update Interface"), object: nil, userInfo: nil)
    }
}
///Alert Template for Country and City
func setAlert(type: String, title: String, style: UIAlertController.Style, message: String?, selectedCountry: Countries?) -> UIAlertController {
let alert = UIAlertController(title: title, message: message, preferredStyle: style)
let cancel = UIAlertAction(title: "Cancel", style: .cancel, handler: nil)
alert.addAction(cancel)
let action = UIAlertAction(title: "Save", style: .default) { (action) in
if let fields = alert.textFields {
let name = fields[0].text!
if type == Type.Countries.rawValue {
self.insertCountry(name: name)
} else if type == Type.Cities.rawValue {
self.insertCity(name: name, selectedCountry: selectedCountry!)
} else {
print("Type não indentificado")
}
}
}
alert.addAction(action)
alert.addTextField(configurationHandler: nil)
return alert
}
//MARK: - Local Data Manipulation - Downloading Data
///This method is called after all the records are downloaded from CloudKit.
//Here, we have to read each record, extract the information, and store it in the persistent store.
// Called after records are downloaded from CloudKit: for each CKRecord,
// finds (or creates) the matching Core Data object by record name, copies
// the record's values and metadata into it, and finally saves the context.
func updateLocalRecords(listRecordsUpdated: [CKRecord]) {
for record in listRecordsUpdated {
//we first get the record's metadata.
//Because this is a collection of values, the framework offers a convenient
//method for this purpose called encodeSystemFields().
//This method takes an NSKeyedArchiver object to encode all the values at once
//and produces a Data structure we can get from the object's encodedData property.
let coder = NSKeyedArchiver(requiringSecureCoding: true)
record.encodeSystemFields(with: coder)
let recordMetadata = coder.encodedData
//Note: With this Data structure containing the record's metadata, the record's type,
// and its identifier (record's name), we have all the information we need to insert
// the record in the persistent store.
let recordType = record.recordType
let recordName = record.recordID.recordName
//Check the Type (Countries or Cities)
if recordType == Type.Countries.rawValue {
//Create a predicate to look for a record with the same record name
//Note: This is not the name of the country or city but the name of the record we
// defined with the UUID() function when the record was created for the first time)
let request: NSFetchRequest<Countries> = Countries.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
do {
var country: Countries!
let result = try context.fetch(request)
//If the record is not found (result is empty), we create a new Countries object
//and assign the record name to its ckRecordName property
if result.isEmpty {
country = Countries(context: context)
country.ckRecordName = recordName
} else {
country = result[0]
}
//Otherwise, we get the record and update its attributes
country.ckMetadata = recordMetadata
country.ckUpload = false
country.name = record["name"] as? String
} catch {
print("Error Fetching Data")
}
} else if recordType == Type.Cities.rawValue {
//Create a predicate to look for a record with the same record name
//Note: This is not the name of the country or city but the name of the record we
// defined with the UUID() function when the record was created for the first time)
let request: NSFetchRequest<Cities> = Cities.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
do {
var city: Cities!
let result = try context.fetch(request)
//If the record is not found (result is empty), we create a new Countries object
//and assign the record name to its ckRecordName property
if result.isEmpty {
city = Cities(context: context)
city.ckRecordName = recordName
} else {
city = result[0]
}
//Otherwise, we get the record and update its attributes
city.ckMetadata = recordMetadata
city.ckUpload = false
city.ckPicture = false
city.name = record["name"] as? String
//Store the record name of the country the city belongs to (stored in the
//Reference object identified with the string "country")
if let reference = record["country"] as? CKRecord.Reference {
city.ckReference = reference.recordID.recordName
}
//Get the picture from the CKAsset.
if let asset = record["picture"] as? CKAsset {
let picture = UIImage(contentsOfFile: asset.fileURL!.path)
city.picture = picture?.pngData()
}
} catch {
print("Error Fetching Data")
}
}
}
//If there are changes in context, save it.
if context.hasChanges {
do {
try context.save()
} catch {
print("Error Saving Context")
}
}
}
///This method is called after records were deleted in CloudKit.
//Here, we have to find the records of the same type and with the same name
//in the persistent store and then use the delete() method to delete them.
// Called after records were deleted in CloudKit: for each (record name,
// record type) pair, finds the matching Core Data object and deletes it,
// then saves the context once at the end.
func deleteLocalRecords(listRecordsDeleted: [String:String]) {
for (recordName, recordType) in listRecordsDeleted {
if recordType == Type.Countries.rawValue {
let request: NSFetchRequest<Countries> = Countries.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
do {
let result = try context.fetch(request)
if !result.isEmpty {
let country = result[0]
context.delete(country)
}
} catch {
print("Error Fetching")
}
} else if recordType == Type.Cities.rawValue {
let request: NSFetchRequest<Cities> = Cities.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
do {
let result = try context.fetch(request)
if !result.isEmpty {
let city = result[0]
context.delete(city)
}
} catch {
print("Error Fetching")
}
}
}
if context.hasChanges {
do {
try context.save()
} catch {
print("Error Saving Context")
}
}
}
///Read all the Cities objects that have a value stored in the ckreference attribute
///and connect them with their Countries object through their country attribute
// Connects downloaded Cities objects to their Countries objects: any city
// whose ckReference is still set gets linked to the country with that
// record name, after which ckReference is cleared as a "done" marker.
func updateLocalReference() {
let requestCities: NSFetchRequest<Cities> = Cities.fetchRequest()
//We use the value of the ckreference attribute in the Cities objects to determine
//whether the reference was already created in Core Data or not.
requestCities.predicate = NSPredicate(format: "ckReference != nil")
//If the value is different than nil, we search for a country with that record name
//and if we find it, we assign it to the country property of the Cities object, thus
//defining the relationship and connecting the city with its country.
do {
let listCities = try context.fetch(requestCities)
for city in listCities {
let requestCountries: NSFetchRequest<Countries> = Countries.fetchRequest()
requestCountries.predicate = NSPredicate(format: "ckRecordName = %@", city.ckReference!)
do {
let listCountries = try context.fetch(requestCountries)
if !listCountries.isEmpty {
city.country = listCountries[0]
city.ckReference = nil
}
} catch {
print("Error Fetching")
}
}
} catch {
print("Error Fetching")
}
if context.hasChanges {
do {
try context.save()
} catch {
print("Error Saving Context")
}
}
}
//MARK: - Local Data Manipulation - Uploading Data
/*
Note: Uploading the records one by one is not appropriate when working with a local storage.
We have to anticipate problems in the connection or the operation that will prevent some records
from being uploaded to the server. This is why we decided to mark each object in the persistent
store with a Boolean value stored in the ckupload attribute. If the attribute is true, we know that
the record was not uploaded yet and therefore we can include it on the list. This way, if a process
failed before, we do it again until all the records are uploaded to the CloudKit database.
*/
///Methods to upload the records the user creates and delete those that the user removes.
//Uploading Multiple Records
// Uploads every pending record (ckUpload == true in the persistent store)
// to CloudKit in a single batch operation; on success the pending flags
// are cleared so the records are not uploaded again.
func uploadRecords() {
let listRecordsToUpload = getRecordsToUpload()
if !listRecordsToUpload.isEmpty {
//We call the configureDatabase() method first to make sure the database is properly configured.
configureDatabase {
let operation = CKModifyRecordsOperation(recordsToSave: listRecordsToUpload, recordIDsToDelete: nil)
operation.modifyRecordsCompletionBlock = { (records, recordsID, error) in
if error != nil {
print("ERROR Uploading Records")
} else {
//Set the values of the ckupload attribute of each record in the persistent store
//back to false, so they are not uploaded again.
self.clearCoreDataRecord()
}
}
self.database.add(operation)
}
}
}
///Methods to process the failed records one by one.
func uploadFailedRecords(failedRecords: [CKRecord]) {
for record in failedRecords {
//Get the metadata from the record received from the server
//(the metadata includes the record's tag)
let coder = NSKeyedArchiver(requiringSecureCoding: true)
record.encodeSystemFields(with: coder)
let recordMetadata = coder.encodedData
let recordType = record.recordType
let recordName = record.recordID.recordName
if recordType == Type.Countries.rawValue {
let request: NSFetchRequest<Countries> = Countries.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
//Updates the ckmetadata attribute of the corresponding object in the persistent store with this value
do {
let result = try context.fetch(request)
if !result.isEmpty {
let item = result[0]
item.ckMetadata = recordMetadata
}
} catch {
print("Error Fetching Country Record in uploadFailedRecords()")
}
} else if recordType == Type.Cities.rawValue {
let request: NSFetchRequest<Cities> = Cities.fetchRequest()
request.predicate = NSPredicate(format: "ckRecordName = %@", recordName)
do {
let result = try context.fetch(request)
if !result.isEmpty {
let item = result[0]
item.ckMetadata = recordMetadata
}
} catch {
print("Error Fetching City Record in uploadFailedRecords()")
}
}
}
//When all the objects in the persistent store are updated, we call the uploadRecords()
//method to upload everything again.
if context.hasChanges {
do {
try self.context.save()
uploadRecords()
} catch {
print("Error Saving Context in uploadFailedRecords()")
}
}
}
///Getting all the records in the persistent store that has to be uploaded
//The method looks for the Countries and Cities objects with the ckupload
//attribute equal to true, adds them to an array, and returns it.
///Builds the list of CKRecord objects to upload: all Countries and Cities objects
///in the persistent store whose ckUpload attribute is true.
///- Returns: the CKRecord objects ready to be sent to CloudKit.
func getRecordsToUpload() -> [CKRecord] {
    /*
     NOTE: We can't send Core Data objects to CloudKit, we first have to convert them
     to CKRecord objects. The problem is that if we just create a new CKRecord object
     every time the user modifies an existing record in the persistent store, the database
     won't be able to match the records and will add a new record instead of modifying the old one
     This is another reason why we included the ckmetadata attribute in every Countries and
     Cities object with the metadata of the record. To allow the server to match the record
     we are sending with those already stored in the database, we have to include the values
     contained in this metadata, and this is the first thing we do with each record.
     */
    var list: [CKRecord] = []
    //First to Countries
    let requestCountries: NSFetchRequest<Countries> = Countries.fetchRequest()
    requestCountries.predicate = NSPredicate(format: "ckUpload = true")
    do {
        let result = try context.fetch(requestCountries)
        for item in result {
            var recordTemp: CKRecord!
            //The metadata is unarchived with an NSKeyedUnarchiver object and we get
            //a CKRecord object in return.
            // NOTE(review): item.ckMetadata is force-unwrapped — assumes every pending
            // object was created with its metadata set; verify against the creation path.
            if let coder = try? NSKeyedUnarchiver(forReadingFrom: item.ckMetadata!) {
                coder.requiresSecureCoding = true
                //Returns a CKRecord object with the values
                recordTemp = CKRecord(coder: coder)
                coder.finishDecoding()
            }
            //Assign to this CKRecord object the rest of the values that were modified by the user
            //(e.g., the name of the country)
            if let record = recordTemp {
                record.setObject(item.name! as NSString, forKey: "name")
                //Add the record to the array
                list.append(record)
            }
        }
    } catch {
        print("Error Fetching")
    }
    //Second to Cities
    let requestCities: NSFetchRequest<Cities> = Cities.fetchRequest()
    requestCities.predicate = NSPredicate(format: "ckUpload = true")
    do {
        let result = try context.fetch(requestCities)
        for item in result {
            var recordTemp: CKRecord!
            if let coder = try? NSKeyedUnarchiver(forReadingFrom: item.ckMetadata!) {
                coder.requiresSecureCoding = true
                recordTemp = CKRecord(coder: coder)
                coder.finishDecoding()
            }
            if let record = recordTemp {
                record.setObject(item.name! as NSString, forKey: "name")
                //Get the Picture
                if item.ckPicture && item.picture != nil {
                    //Get URL of the Temp Directory
                    var url = URL(fileURLWithPath: NSTemporaryDirectory())
                    //Add the name of the file to the path
                    // NOTE(review): a single shared file name is reused for every city;
                    // each write overwrites the previous one before CKAsset reads it —
                    // confirm the asset data is captured before the next iteration.
                    url = url.appendingPathComponent("tempFile.png")
                    //Create a temporary file with this data and then create the CKAsset
                    // object from the URL of this file
                    do {
                        let pngImage = item.picture!
                        //Store the data in the file
                        try pngImage.write(to: url, options: .atomic)
                        let asset = CKAsset(fileURL: url)
                        record.setObject(asset, forKey: "picture")
                    } catch {
                        print("Error Image Not Stored")
                    }
                }
                //Get the Zone and Reference
                if let country = item.country {
                    let zone = CKRecordZone(zoneName: "listPlaces")
                    let id = CKRecord.ID(recordName: country.ckRecordName!, zoneID: zone.zoneID)
                    let reference = CKRecord.Reference(recordID: id, action: .deleteSelf)
                    record.setObject(reference, forKey: "country")
                }
                list.append(record)
            }
        }
    } catch {
        print("Error Fetching")
    }
    return list
}
///Cleaning up the objects in the persistent store
///Resets the upload flags after a successful batch upload.
///Countries objects get ckUpload = false; Cities objects additionally get
///ckPicture = false to record that their pictures were uploaded too.
func clearCoreDataRecord() {
    // Reset the pending-upload flag on every country that was just sent.
    let countriesRequest: NSFetchRequest<Countries> = Countries.fetchRequest()
    countriesRequest.predicate = NSPredicate(format: "ckUpload = true")
    if let pendingCountries = try? context.fetch(countriesRequest) {
        pendingCountries.forEach { $0.ckUpload = false }
    } else {
        print("Error Fetching Countries in clearCoreDataRecord()")
    }
    // Do the same for cities, also clearing the picture-pending flag.
    let citiesRequest: NSFetchRequest<Cities> = Cities.fetchRequest()
    citiesRequest.predicate = NSPredicate(format: "ckUpload = true")
    if let pendingCities = try? context.fetch(citiesRequest) {
        for city in pendingCities {
            city.ckUpload = false
            city.ckPicture = false
        }
    } else {
        print("Error Fetching Cities in clearCoreDataRecord()")
    }
    // Persist the flag changes, if any were made.
    guard context.hasChanges else { return }
    do {
        try context.save()
    } catch {
        print("Error Saving Context in clearCoreDataRecord()")
    }
}
///Erase the records in CloudKit that correspond to the objects removed by the user from the persistent store.
///Erases from CloudKit the records that correspond to objects the user deleted locally.
///The record names were previously captured in CKDelete entities; once the server
///confirms the deletion, the CKDelete entries themselves are removed.
func removeRecords() {
    /*
     Create an object of type CKDelete with the record name of each of the Countries and Cities
     objects deleted by the user and then remove them from the persistent store. This way,
     to reflect the changes in the CloudKit database, we just have to call a method in our model
     to erase the records that match the record names stored in the CKDelete entity.
     */
    var listRecordsToDelete: [CKRecord.ID] = []
    let request: NSFetchRequest<CKDelete> = CKDelete.fetchRequest()
    do {
        let result = try context.fetch(request)
        for item in result {
            // Rebuild the full record ID (zone + name) the server expects.
            let zone = CKRecordZone(zoneName: item.zoneName!)
            let id = CKRecord.ID(recordName: item.recordName!, zoneID: zone.zoneID)
            listRecordsToDelete.append(id)
        }
    } catch {
        print("Error Fetching Record ID in removeRecords()")
    }
    if !listRecordsToDelete.isEmpty {
        let operation = CKModifyRecordsOperation(recordsToSave: nil, recordIDsToDelete: listRecordsToDelete)
        operation.modifyRecordsCompletionBlock = { (records, recordsID, error) in
            if error != nil {
                // Fixed message typo ("Releting" -> "Deleting").
                print("Error Deleting Records in removeRecords()")
            } else {
                // Server confirmed: drop the local CKDelete markers so the same
                // deletions are not requested again.
                let request: NSFetchRequest<CKDelete> = CKDelete.fetchRequest()
                do {
                    let result = try self.context.fetch(request)
                    for item in result {
                        self.context.delete(item)
                    }
                    try self.context.save()
                } catch {
                    print("Error Deleting in removeRecords()")
                }
            }
        }
        database.add(operation)
    }
}
//MARK: - Contact the CloudKit Servers
//Two methods that are going to contact the CloudKit servers and process the information by Subscription
///Method to create the subscription and the zone
///One-time CloudKit setup: registers the database subscription and creates the
///custom "listPlaces" zone, tracking completion flags in User Defaults.
///- Parameter executeClosure: work to run once the zone is known to exist.
func configureDatabase(executeClosure: @escaping () -> Void) {
    let userSettings = UserDefaults.standard
    //Check whether subscriptionSaved has been created
    if !userSettings.bool(forKey: "subscriptionSaved") {
        //If not we create a new CKDatabaseSubscription
        let newSubscription = CKDatabaseSubscription(subscriptionID: "updatesDatabase")
        //Setting the Nofication are going to be sent by the server
        let info = CKSubscription.NotificationInfo()
        // Silent push: wakes the app in the background without a user-visible alert.
        info.shouldSendContentAvailable = true
        newSubscription.notificationInfo = info
        //The subscription is saved on the server
        database.save(newSubscription) { (subscription, error) in
            if error != nil {
                print("ERROR Creating Subscription")
            } else {
                //Set TRUE if the operation is successful
                userSettings.set(true, forKey: "subscriptionSaved")
            }
        }
    }
    //Create the custom Zone and check if it was already created
    if !userSettings.bool(forKey: "zoneCreated") {
        //Create Zone listPlaces to store our records
        let newZone = CKRecordZone(zoneName: "listPlaces")
        //The newZone is saved on the server
        database.save(newZone) { (zone, error) in
            if error != nil {
                // NOTE(review): on failure executeClosure is never called, so the
                // caller's work is silently dropped — confirm this is intended.
                print("ERROR Creating Zone")
            } else {
                //Set TRUE if the operation is successful
                userSettings.set(true, forKey: "zoneCreated")
                //We call the closure because we have to be sure that the zone
                //was created before trying to store any records.
                executeClosure()
            }
        }
    } else {
        //The closure is also called if the zone was already created to make sure
        //that its code is always executed.
        executeClosure()
    }
}
///Method to download and process the changes in the database
///Entry point for fetching remote changes (called from background-fetch / push).
///Ensures the database is configured, then downloads the updates on the main queue.
@objc func checkUpdates(finishClosure: @escaping (UIBackgroundFetchResult) -> Void) {
    // Make sure the subscription and the custom zone exist before fetching.
    configureDatabase {
        // Hop onto the main queue so the download logic can touch the model safely.
        OperationQueue.main.addOperation {
            self.downloadUpdates(finishClosure: finishClosure)
        }
    }
}
///Getting the updates from the server
///Fetches and applies the changes accumulated on the CloudKit server since the last sync.
///Uses two chained operations: CKFetchDatabaseChangesOperation to discover which zones
///changed, then CKFetchRecordZoneChangesOperation to download the actual records.
///- Parameter finishClosure: reports .newData / .noData / .failed to the system.
func downloadUpdates(finishClosure: @escaping (UIBackgroundFetchResult) -> Void) {
    var listRecordsUpdated: [CKRecord] = []
    var listRecordsDeleted: [String:String] = [:]
    // 1. Defining the properties we are going to use to store the tokens
    //    (one for the database and another for the custom zone)
    //For the database Token
    var changeToken: CKServerChangeToken!
    //For the our custom Zone
    var changeZoneToken: CKServerChangeToken!
    let userSettings = UserDefaults.standard
    // 2. Check if there are tokens already stored in the User Defaults Database
    //Note: Because the tokens are instances of the CKServerChangeToken class,
    //      we can't store their values directly in User Defaults, we first have
    //      to convert them into Data structures.
    if let data = userSettings.value(forKey: "changeToken") as? Data {
        //If there is a token unarchive it...
        if let token = try? NSKeyedUnarchiver.unarchivedObject(ofClass: CKServerChangeToken.self, from: data) {
            // and store it here.
            changeToken = token
        }
    }
    if let data = userSettings.value(forKey: "changeZoneToken") as? Data {
        //If there is a token unarchive it...
        if let token = try? NSKeyedUnarchiver.unarchivedObject(ofClass: CKServerChangeToken.self, from: data) {
            // and store it here.
            changeZoneToken = token
        }
    }
    // 3. Configure the operations necessary to get the updates from the server.
    //Note: We have to perform two operations on the database, one to download
    //      the list of changes available and another to download the actual changes
    //      and show them to the user
    var zonesIDs: [CKRecordZone.ID] = []
    //This Operation requires the previous database token to get only the changes
    // that are not available on the device, so we pass the value of the changeToken property.
    let operation = CKFetchDatabaseChangesOperation(previousServerChangeToken: changeToken)
    //Report the zone that changed
    // The closure assigned to this property is executed every time the system
    // finds a zone whose content has changed.
    operation.recordZoneWithIDChangedBlock = { (zoneID) in
        zonesIDs.append(zoneID)
    }
    //Report the creation a new database token
    // This closure is executed every time the system decides to perform the operation
    // again to download the changes in separate processes. To make sure that we only
    // receive the changes that we did not process yet, we use this closure to update
    // the changeToken property with the current token.
    operation.changeTokenUpdatedBlock = { (token) in
        changeToken = token
    }
    //Report the conclusion of the operation
    // The closure assigned to this property is executed to let the app know that the
    // operation is over, and this is the signal that indicates that we have all the
    // information we need to begin downloading the changes with the second operation.
    //Note: This last closure receives three values: the latest database token, a Boolean
    //      value that indicates whether there are "more changes" available (by default, the system
    //      takes care of fetching all the changes, so we don't need to consider this value),
    //      and a CKError structure to report errors
    operation.fetchDatabaseChangesCompletionBlock = { (token, more, error) in
        if error != nil {
            finishClosure(.failed)
            print("ERROR in fetchDatabseChangesComplitionBlock")
        } else if !zonesIDs.isEmpty {
            //If the array is not empty, we have to configure the CKFetchRecordZoneChangesOperation
            // operation to get the changes.
            changeToken = token
            //Because in this example we only work with one zone, we read the first element of the
            // zonesIDs array to get the ID of our custom zone and provide a ZoneConfiguration object
            // with the current token stored in the changeZoneToken property.
            let configuration = CKFetchRecordZoneChangesOperation.ZoneConfiguration()
            configuration.previousServerChangeToken = changeZoneToken
            let fetchOperation = CKFetchRecordZoneChangesOperation(recordZoneIDs: zonesIDs, configurationsByRecordZoneID: [zonesIDs[0] : configuration])
            //The changes are fetched, and the results are reported to the closures assigned to its properties.
            //Operation 1: This closure is called every time a new or updated record is received.
            fetchOperation.recordChangedBlock = { (record) in
                listRecordsUpdated.append(record)
                //First we have to check if the record is of type Countries or Cities
                // and store the record in the corresponding array
                if record.recordType == Type.Countries.rawValue {
                    //We use the firstIndex() method to look for duplicates
                    //If the record already exists in the array, we update its values,
                    // otherwise, we add the record to the list.
                    let index = self.listCountries.firstIndex(where: { (item) -> Bool in
                        return item.recordID == record.recordID
                    })
                    if index != nil {
                        self.listCountries[index!] = record
                    } else {
                        self.listCountries.append(record)
                    }
                } else if record.recordType == Type.Cities.rawValue {
                    //we first check whether the record contains a reference to a country and
                    //only update or add the record to the array if the reference corresponds
                    //to the country currently selected by the user
                    if let country = record["country"] as? CKRecord.Reference {
                        if country.recordID == self.selectedCountry {
                            //We use the firstIndex() method to look for duplicates.
                            //If the record already exists in the array, we update its values,
                            // otherwise, we add the record to the list.
                            let index = self.listCities.firstIndex(where: { (item) -> Bool in
                                return item.recordID == record.recordID
                            })
                            if index != nil {
                                self.listCities[index!] = record
                            } else {
                                self.listCities.append(record)
                            }
                        }
                    }
                }
            }
            //Operation 2: This closure is called every time the app receives the ID of a deleted
            //record (a record that was deleted from the CloudKit database).
            fetchOperation.recordWithIDWasDeletedBlock = { (recordID, recordType) in
                listRecordsDeleted[recordID.recordName] = recordType
                if recordType == Type.Countries.rawValue {
                    let index = self.listCountries.firstIndex(where: { (item) -> Bool in
                        return item.recordID == recordID
                    })
                    if index != nil {
                        self.listCountries.remove(at: index!)
                    }
                } else if recordType == Type.Cities.rawValue {
                    let index = self.listCities.firstIndex(where: { (item) -> Bool in
                        return item.recordID == recordID
                    })
                    if index != nil {
                        self.listCities.remove(at: index!)
                    }
                }
            }
            //Operation 3: Next two closures are executed when the process completes a cycle
            //Note: Depending on the characteristics of our application, we may need to perform
            //      some tasks in these closures
            fetchOperation.recordZoneChangeTokensUpdatedBlock = { (zoneID, token, data) in
                changeZoneToken = token
            }
            //Operation 4: The same above
            fetchOperation.recordZoneFetchCompletionBlock = { (zoneID, token, data, more, error) in
                //If there is an error and we can cast it as a CKError object, the processErrors() method is executed.
                if let ckError = error as? CKError {
                    CloudErrors.processErrors(error: ckError)
                } else {
                    changeZoneToken = token
                    //To store the records that were updated in an array called listRecordsUpdated and
                    //the record IDs of the records that were deleted in a dictionary called listRecordsDeleted.
                    //When the fetching of the record zone is finished, we call the methods updateLocalRecords()
                    //and deleteLocalRecords() with these values to perform the corresponding changes in the
                    //persistent store.
                    self.updateLocalRecords(listRecordsUpdated: listRecordsUpdated)
                    self.deleteLocalRecords(listRecordsDeleted: listRecordsDeleted)
                    listRecordsUpdated.removeAll()
                    listRecordsDeleted.removeAll()
                }
            }
            //Operation 5: This closure is called to report that the operation is over.
            fetchOperation.fetchRecordZoneChangesCompletionBlock = { (error) in
                if error != nil {
                    finishClosure(.failed)
                    print("Error in fetchOperation.fetchRecordZoneChangesCompletionBlock")
                } else {
                    //If no error is found, we permanently store the current tokens in the User Defaults database
                    if changeToken != nil {
                        //To store the tokens we have to turn them into Data structures and encode them with the archivedData()
                        if let data = try? NSKeyedArchiver.archivedData(withRootObject: changeToken!, requiringSecureCoding: false) {
                            userSettings.set(data, forKey: "changeToken")
                        }
                    }
                    if changeZoneToken != nil {
                        //To store the tokens we have to turn them into Data structures and encode them with the archivedData()
                        if let data = try? NSKeyedArchiver.archivedData(withRootObject: changeZoneToken!, requiringSecureCoding: false) {
                            userSettings.set(data, forKey: "changeZoneToken")
                        }
                    }
                    ///Removed with the Core Data implementation
                    //  self.updateInterface()
                    self.updateLocalReference()
                    //If there are changes we have to tell the system that new data has been downloaded.
                    finishClosure(.newData)
                }
            }
            // 4. After the definition of each fetchOperation and their properties, we call the add() method
            //    of the CKDatabase object to add them to the database.
            self.database.add(fetchOperation)
        } else {
            //If there are no changes available
            finishClosure(.noData)
        }
    }
    // 5. After the definition of each operation and their properties, we call the add() method
    //    of the CKDatabase object to add them to the database.
    database.add(operation)
}
var AppData = ApplicationData()
|
bb942d7949eb3fa1ca264a53f17aac29e7301fb5
|
[
"Swift",
"Markdown"
] | 7 |
Swift
|
AfonsohsS/CloudKitDash
|
27d4b82349d406a258167835c8b4fe50359cf87e
|
5a24cca1826bd78c9843e34fcccc9b9abefb49fb
|
refs/heads/master
|
<repo_name>hscheuerle/openforge-assessment-hscheuerle<file_sep>/src/app/tab2/tab2.page.ts
import { Component, OnInit } from '@angular/core';
import { Store } from '@ngrx/store';
import { Observable } from 'rxjs';
import { ActivatedRoute } from '@angular/router';
import { InAppBrowser, InAppBrowserObject } from '@ionic-native/in-app-browser/ngx';
import { State } from '../reducers';
@Component({
  selector: 'app-tab2',
  templateUrl: 'tab2.page.html',
  styleUrls: ['tab2.page.scss']
})
export class Tab2Page {
  // Current content of the search box (restored from ?search= on enter).
  value;
  // Detailed profile of the user resolved by the search effects, if any.
  selectedUser$: Observable<object> = this.store.select(state => state.github.selectedUser);
  // Reference to the in-app browser so it can be closed when leaving the tab.
  browser: InAppBrowserObject;

  constructor(
    private store: Store<State>,
    private route: ActivatedRoute,
    private iab: InAppBrowser,
  ) { }

  ionViewDidEnter() {
    // Pre-fill the search box when this tab was opened with a ?search= param.
    const search = this.route.snapshot.queryParamMap.get('search');
    if (search) { this.value = search; } else { this.value = ''; }
  }

  ionViewDidLeave() {
    // Reset the selected user and close any open browser window on leave.
    this.store.dispatch({ type: '[Github API] Clear User'});
    if (this.browser) {
      this.browser.close();
    }
  }

  // Handler for the search input: dispatches a search for non-empty text,
  // clears the selection otherwise.
  searchUser(event) {
    const { value } = event.target;
    if (value !== '') {
      console.log('searchUser dispatches search');
      this.store.dispatch({ type: '[Github API] Search User', props: { input: value } });
    } else { // when using backspace | ctrl + backspace to clear
      this.store.dispatch({ type: '[Github API] Clear User'});
    }
  }

  // Handler for the search box's clear button.
  clearValue() {
    this.store.dispatch({ type: '[Github API] Clear User'});
  }

  // Opens the user's blog in an in-app browser, forcing an https:// scheme
  // regardless of how the URL was stored on the profile.
  openInAppBrowser(event, blog) {
    event.preventDefault();
    const prefixUrl = 'https://' + blog.replace('http://', '').replace('https://', '');
    this.browser = this.iab.create(prefixUrl, '_blank');
  }
}
<file_sep>/src/app/effects/github.effects.ts
import { Injectable } from '@angular/core';
import { Actions, createEffect, ofType } from '@ngrx/effects';
import { of } from 'rxjs';
import { map, catchError, withLatestFrom, switchMap, tap, exhaustMap } from 'rxjs/operators';
import { GithubService } from '../shared/github.service';
import { Store, Action } from '@ngrx/store';
import { searchUser, searchUserSuccess } from '../actions/github.actions';
import { UserBasic, SeachedUserSuccessPayload } from '../interfaces/User';
import { State } from '../reducers';
@Injectable()
export class GithubEffects {
  constructor(
    private actions$: Actions,
    private githubService: GithubService,
    private store: Store<State>
  ) { }

  // Loads the next page of users. exhaustMap ignores re-triggers while a
  // request is in flight; `since` (the pagination cursor) is read from state.
  loadUsers$ = createEffect(() => this.actions$.pipe(
    ofType('[Github API] Load Users'),
    withLatestFrom(this.store.select(state => state.github.since)),
    exhaustMap(([action, since]) => this.githubService.requestUsers(since)
      .pipe(
        map(res => ({ type: '[Github API] Users Loaded Success', payload: res })),
        catchError(() => of({ type: '[Github API] Users Loaded Error'})),
      ))
  ));

  // Searches users by login. switchMap cancels the previous request when the
  // user keeps typing, so only the latest query's result is dispatched.
  // only map success when res contains values
  searchUser$ = createEffect(() => this.actions$.pipe(
    ofType(searchUser),
    switchMap(action => this.githubService.searchUser(action.props.input).pipe(
      map(res => ({ type: '[Github API] Search User Success', payload: res}) as { type: string, payload: SeachedUserSuccessPayload }),
      catchError(() => of({ type: '[Github API] Search User Error'})),
    )
  )));

  // After a successful search, fetch the full profile of the first hit.
  getUser$ = createEffect(() => this.actions$.pipe(
    ofType(searchUserSuccess),
    // need to handle items better!
    // NOTE(review): items[0] throws if the search returned zero hits — the
    // catchError below masks it as a Get User Error; confirm this is intended.
    switchMap(action => this.githubService.getUser(action.payload.items[0].login).pipe(
      map(res => ({ type: '[Github API] Get User Success', payload: res})),
      catchError(() => of({ type: '[Github API] Get User Error'})),
    )
  )));
}
<file_sep>/src/app/shared/github.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse, HttpHeaders } from '@angular/common/http';
import { Observable, of } from 'rxjs';
import { environment } from 'src/environments/environment';
import * as parseLinkHeader from 'parse-link-header';
import { map } from 'rxjs/operators';
import { UserBasic } from '../interfaces/User';
@Injectable({
  providedIn: 'root'
})
export class GithubService {
  // Personal access token pulled from the environment (see README).
  devToken: string;
  // Shared request options: auth header plus full-response observation.
  options;

  constructor(private http: HttpClient) {
    this.devToken = environment.devToken;
    this.options = {
      headers: {
        Accept: 'application/vnd.github.v3.text-match+json',
        'Content-Type': 'application/json',
        Authorization: `token ${this.devToken}`,
      },
      observe: 'response',
    };
  }

  // Fetches one page of users starting after `sinceId`, and extracts the next
  // pagination cursor from the RFC 5988 Link response header.
  requestUsers(sinceId: string) {
    return this.http.get<UserBasic[]>('https://api.github.com/users', {
      headers: this.options.headers,
      observe: this.options.observe,
      params: {
        since: sinceId
      }
    }).pipe(
      map((res: HttpResponse<UserBasic[]>) => {
        const linkHeader = res.headers.get('Link');
        const links = parseLinkHeader(linkHeader);
        // NOTE(review): assumes a `next` link is always present — on the very
        // last page this would throw; confirm against the API's behavior.
        const { since } = links.next;
        return { users: res.body, since };
      })
    );
  }

  // Searches for users matching `input`, restricted to type:user accounts.
  searchUser(input) {
    return this.http.get('https://api.github.com/search/users', {
      headers: this.options.headers,
      params: {
        q: `${input}+type:user`
      }
    });
  }

  // Fetches the full public profile for a single username.
  getUser(username) {
    return this.http.get(`https://api.github.com/users/${username}`, {
      headers: this.options.headers,
    });
  }
}
<file_sep>/src/app/actions/github.actions.ts
import { createAction, props } from '@ngrx/store';
import { UserBasic, SeachedUserSuccessPayload, UserDetailed } from '../interfaces/User';
// Request the next page of users (cursor comes from state.github.since).
export const loadUsers = createAction('[Github API] Load Users');
// A page of users arrived, together with the next pagination cursor.
export const usersLoadedSuccess = createAction('[Github API] Users Loaded Success',
  props<{ payload: { users: UserBasic[], since: string } }>());
// Search users by (partial) login.
export const searchUser = createAction('[Github API] Search User',
  props<{ props: { input: string } }>());
// Search results arrived; triggers fetching the first hit's full profile.
export const searchUserSuccess = createAction('[Github API] Search User Success',
  props<{ payload: SeachedUserSuccessPayload }>());
// Full profile of the searched user arrived.
export const getUserSuccess = createAction('[Github API] Get User Success',
  props<{ payload: UserDetailed }>());
// Drop the currently selected user from state.
export const clearUser = createAction('[Github API] Clear User');
<file_sep>/src/app/reducers/index.ts
import {
ActionReducer,
ActionReducerMap,
createFeatureSelector,
createSelector,
MetaReducer
} from '@ngrx/store';
import { environment } from '../../environments/environment';
import { githubReducer } from './github.reducers';
import { UserDetailed, UserBasic } from '../interfaces/User';
// Root store shape: a single `github` feature slice.
export interface State {
  github: {
    // Full profile of the user selected via search, or undefined.
    selectedUser: UserDetailed;
    // Accumulated pages of users for the infinite-scroll list.
    users: UserBasic[];
    // Pagination cursor for the GitHub /users endpoint.
    since: string;
  };
}

export const reducers: ActionReducerMap<State> = {
  github: githubReducer
};

// No meta-reducers registered in either environment (placeholder).
export const metaReducers: MetaReducer<State>[] = !environment.production ? [] : [];
<file_sep>/src/app/interfaces/User.ts
// Shape of an entry from GET /users (the paginated listing endpoint).
export interface UserBasic {
  login: string;
  id: number;
  node_id: string;
  avatar_url: string;
  gravatar_id: string;
  url: string;
  html_url: string;
  followers_url: string;
  following_url: string;
  gists_url: string;
  starred_url: string;
  subscriptions_url: string;
  organizations_url: string;
  repos_url: string;
  events_url: string;
  received_events_url: string;
  type: string;
  site_admin: boolean;
}

// A search hit from GET /search/users; text_matches is populated because the
// service requests the text-match+json media type.
export interface SearchedUser extends UserBasic {
  score: number;
  text_matches: {
    object_url: string;
    object_type: string;
    property: string;
    fragment: string;
  }[];
}

// Envelope returned by GET /search/users.
export interface SeachedUserSuccessPayload {
  total_count: number;
  incomplete_results: boolean;
  items: SearchedUser[];
}

// Shape of GET /users/{username} — the full public profile.
export interface UserDetailed extends UserBasic {
  name: string;
  company: string;
  blog: string;
  location: string;
  email: string;
  hireable: boolean | null;
  bio: string | null;
  public_repos: number;
  public_gists: number;
  followers: number;
  following: number;
  created_at: string;
  updated_at: string;
}
<file_sep>/src/app/tab2/color-break.directive.ts
import { Directive, Input, ElementRef, OnInit, Renderer2 } from '@angular/core';
@Directive({
  selector: '[appColorBreak]'
})
export class ColorBreakDirective implements OnInit {
  // When true the host element gets color="secondary", otherwise "primary".
  @Input() appColorBreak: boolean;

  constructor(private el: ElementRef, private renderer: Renderer2) {}

  ngOnInit() {
    // Pick the Ionic color attribute once, based on the boolean input flag.
    const color = this.appColorBreak ? 'secondary' : 'primary';
    this.renderer.setAttribute(this.el.nativeElement, 'color', color);
  }
}
<file_sep>/src/app/reducers/github.reducers.ts
import { createReducer, on } from '@ngrx/store';
import { usersLoadedSuccess, searchUserSuccess, getUserSuccess, clearUser } from '../actions/github.actions';
import { UserBasic, UserDetailed } from '../interfaces/User';
// Feature-slice shape handled by this reducer.
interface GithubState {
  // Full profile of the selected user, or undefined when none is selected.
  selectedUser: UserDetailed;
  // Accumulated pages of users.
  users: UserBasic[];
  // Pagination cursor for the next /users request; '0' fetches from the start.
  since: string;
}

export const initialState: GithubState = {
  selectedUser: undefined,
  users: [],
  since: '0',
};

// tslint:disable-next-line: variable-name
const _githubReducer = createReducer(initialState,
  // Append the freshly loaded page and remember the next cursor.
  on(usersLoadedSuccess, (state, { payload }) => {
    const { since, users } = payload;
    return { ...state, users: [...state.users, ...users], since };
  }),
  on(getUserSuccess, (state, { payload }) => {
    return { ...state, selectedUser: payload };
  }),
  on(clearUser, (state) => {
    return { ...state, selectedUser: undefined };
  })
);

// Exported wrapper so the reducer survives AOT compilation.
export function githubReducer(state, action) {
  return _githubReducer(state, action);
}
<file_sep>/src/app/tab1/tab1.page.ts
import { Component, OnInit, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';
import { Store } from '@ngrx/store';
import { UserBasic } from '../interfaces/User';
import { State } from '../reducers';
@Component({
  selector: 'app-tab1',
  templateUrl: 'tab1.page.html',
  styleUrls: ['tab1.page.scss']
})
export class Tab1Page implements OnInit, OnDestroy {
  // Current list of users mirrored from the store.
  users: UserBasic[];
  // Pending ion-infinite-scroll event; completed once fresh users arrive.
  eventRef;
  subscription: Subscription;

  constructor(
    private store: Store<State>
  ) { }

  ngOnInit() {
    this.subscription = this.store.select(state => state.github.users).subscribe(users => {
      this.users = users;
      // Close the infinite-scroll spinner once the dispatched load has landed.
      if (this.eventRef) {
        this.eventRef.target.complete();
        this.eventRef = undefined;
      }
    });
  }

  ngOnDestroy() {
    this.subscription.unsubscribe();
  }

  ionViewDidEnter() {
    // Fetch the first page only once; revisiting the tab keeps cached users.
    if (this.users.length === 0) {
      this.dispatchLoadUsers();
    }
  }

  // Infinite-scroll handler: remember the event so the subscription above can
  // complete it, then request the next page.
  loadData(event) {
    this.eventRef = event;
    this.dispatchLoadUsers();
  }

  // Explicit "load more" button handler.
  loadMoreUsers() {
    this.dispatchLoadUsers();
  }

  /** Single place that asks the effects layer for the next page of users. */
  private dispatchLoadUsers() {
    this.store.dispatch({ type: '[Github API] Load Users'});
  }
}
<file_sep>/README.md
# openforge-assessment-hscheuerle
48 hour assessment for OpenForge. Demonstrates angular, ngrx, and ionic proficiency.
## usage
Because the GitHub API is no longer fully accessible without authentication, to run this app in development you need to create and place an access token in the project.
1. create the file ./env.ts in the project root
2. add the following line and save
<pre><code>export const DEV_TOKEN = 'your_github_personal_access_token';</code></pre>
|
8d671533477acdccf9785f48ae49b19a1d1f7f32
|
[
"Markdown",
"TypeScript"
] | 10 |
TypeScript
|
hscheuerle/openforge-assessment-hscheuerle
|
4a42f3af97451cd5f7fc9521b84300a4484a5e12
|
c8d1d93882ec604460232c95e7f1d7c6d53159dc
|
refs/heads/master
|
<file_sep>let apples_data;
// Raw Quick Draw binary data, loaded in preload().
let alarm_clocks_data;
let cats_data;
// Class labels; each is the index of the corresponding output neuron.
const CAT = 0;
const ALARM_CLOCK = 1;
const APPLE = 2;
// Each drawing is a 28x28 grayscale image = 784 bytes.
const len = 784;
// Number of drawings per category in each .bin file.
const total_data = 1000;
// Per-category containers holding the .training / .testing splits.
let apples = {};
let alarm_clocks = {};
let cats = {};
// The 784-64-3 classifier network, created in setup().
let nn;
// p5 hook: load the three Quick Draw binary datasets before setup() runs.
function preload(){
    apples_data = loadBytes('./data/apple1000.bin');
    alarm_clocks_data = loadBytes('./data/alarm_clock1000.bin');
    cats_data = loadBytes('./data/cat1000.bin');
}
// Splits one category's raw bytes into an 80/20 training/testing split.
// Each sample is a 784-byte Uint8Array view (no copy) into the loaded buffer,
// tagged with its numeric class label.
function prepareData(category, data, label){
    category.training = [];
    category.testing = [];
    for (let i = 0; i < total_data; i++) {
        let offset = i * len;
        // First 80% of samples train the network, the rest evaluate it.
        let threshold = floor(.8 * total_data);
        if (i < threshold) {
            // subarray() returns a view sharing the underlying memory.
            category.training[i] = data.bytes.subarray(offset, offset + len);
            category.training[i].label = label
        } else {
            category.testing[i - threshold] = data.bytes.subarray(offset, offset + len);
            category.testing[i - threshold].label = label
        }
    }
}
// Runs one training epoch: feeds every (pre-shuffled) training sample through
// the network with a one-hot target vector for its class label.
function trainEpoch(training){
    for (let i = 0; i < training.length; i++) {
        let data = training[i]
        // BUG FIX: `data` is a Uint8Array view, and TypedArray.prototype.map()
        // returns another Uint8Array, which truncates the normalized floats
        // back to integers (every pixel except 255 became 0). Build a plain
        // Array so the inputs keep their fractional 0..1 values.
        let inputs = Array.from(data, (x) => x / 255);
        let label = training[i].label;
        // One-hot encode the label for the three output neurons.
        let targets = [0, 0, 0];
        targets[label] = 1;
        nn.train(inputs, targets);
    }
}
// Evaluates the network on the held-out testing set and returns the
// percentage (0..100) of correctly classified drawings.
function testAll(testing) {
    let correct = 0;
    for (let i = 0; i < testing.length; i++) {
        let data = testing[i]
        // BUG FIX: mapping a Uint8Array yields another Uint8Array, truncating
        // the normalized values to integers. Build a plain Array instead so
        // the network sees the same 0..1 floats it was trained on.
        let inputs = Array.from(data, (x) => x / 255);
        let label = testing[i].label;
        let guess = nn.predict(inputs)
        // The predicted class is the index of the highest output activation.
        let m = max(guess);
        let classification = guess.indexOf(m);
        if(classification == label){
            correct ++;
        }
    }
    let percentage = 100 * correct / testing.length;
    return percentage
}
// p5 draw loop: lets the user sketch in white (8px stroke) on the black
// canvas while the mouse button is held down.
function draw(){
    strokeWeight(8);
    stroke(255)
    if(mouseIsPressed){
        line(pmouseX, pmouseY, mouseX, mouseY);
    }
}
// p5 hook: builds the canvas, prepares the datasets, creates the network,
// and wires the Train / Test / Guess buttons.
function setup(){
    createCanvas(280,280);
    background(0);
    // Split each category's raw bytes into training/testing sets.
    prepareData(cats,cats_data, CAT)
    prepareData(apples, apples_data, APPLE)
    prepareData(alarm_clocks, alarm_clocks_data, ALARM_CLOCK)
    // 784 inputs (28x28 pixels), 64 hidden units, 3 output classes.
    nn = new NeuralNetwork(784,64,3);
    // Merge and shuffle the training samples so classes are interleaved.
    let training = [];
    training = training.concat(cats.training);
    training = training.concat(alarm_clocks.training);
    training = training.concat(apples.training);
    shuffle(training,true);
    let testing = [];
    testing = testing.concat(cats.testing);
    testing = testing.concat(alarm_clocks.testing);
    testing = testing.concat(apples.testing);
    console.log(testing)
    // Train button: one full epoch per click.
    let trainButton = select('#train');
    let epochCounter = 0;
    trainButton.mousePressed(function(){
        trainEpoch(training);
        epochCounter++;
        console.log('Epoch: ' + epochCounter)
    })
    // Test button: report accuracy on the held-out set.
    let testButton = select('#test')
    testButton.mousePressed(function(){
        let percentage = testAll(testing);
        console.log('percent: ' + nf(percentage,2,2) + "%")
    })
    // Guess button: downscale the user's sketch to 28x28 and classify it.
    let guessButton = select('#guess')
    guessButton.mousePressed(function(){
        let inputs = [];
        let img = get();
        img.resize(28,28);
        img.loadPixels();
        for (let i = 0; i < len; i++) {
            // Read only the red channel (i*4) — the sketch is grayscale.
            let bright = img.pixels[i*4];
            inputs[i] = bright / 255.0
        }
        console.log(inputs);
        let guess = nn.predict(inputs)
        let m = max(guess);
        let classification = guess.indexOf(m);
        if(classification === CAT){
            console.log('cat')
        }else if (classification == APPLE){
            console.log('apple')
        }else if(classification == ALARM_CLOCK){
            console.log('alarm clock')
        }
    })
    // let total = 100;
    // for (let n = 0; n < total; n++) {
    //     let img = createImage(28,28, );
    //     img.loadPixels();
    //     let offset = n * 784;
    //     for (let i = 0; i < 784; i++) {
    //         let val = apples_data.bytes[i + offset];
    //         img.pixels[i * 4 + 0] = val;
    //         img.pixels[i * 4 + 1] = val;
    //         img.pixels[i * 4 + 2] = val;
    //         img.pixels[i * 4 + 3] = 255;
    //     }
    //     let x = (n % 10) * 28;
    //     let y = floor(n / 10) * 28;
    //     img.updatePixels(img,x,y);
    // }
}
|
b157f4fd5d20c9ae5bb363c4437c22b4ed96fb0f
|
[
"JavaScript"
] | 1 |
JavaScript
|
dgbarbosa/machine-learning_doodle-recognition
|
e633b1c3aef12b4ecbc96e299d1a1c7aa2d3aff2
|
04f5a8c68b5023fc1aad49f8f16e21f67a2ed7a2
|
refs/heads/main
|
<file_sep><?php
class BlouseSearchCest
{
    /**
     * Search for "Blouse" and verify that the product modal window
     * ("Quick view") opens and shows the matching product title.
     *
     * Cleanup: removed a long list of local selector variables that were
     * declared but never used anywhere in the method.
     */
    public function searchForBlouseCest(AcceptanceTester $I)
    {
        $I->amOnPage('');
        // Wait for the search box to load, then submit the query.
        $I->waitForElement('#search_query_top');
        $I->fillField('#search_query_top', 'Blouse');
        $I->click('#searchbox > button');
        // Wait for the results page — specifically the "Quick view"
        // button, which only appears on hover — and click it.
        $I->waitForElement('.left-block');
        $I->moveMouseOver('.left-block');
        $I->waitForElement('.quick-view');
        $I->click('.quick-view');
        // Wait for the modal iframe to open.
        $I->switchToIFrame('.fancybox-iframe');
        $I->waitForElement('#product > div > div > div.pb-left-column.col-xs-12.col-sm-4.col-md-5');
        // If it really is a blouse, the test passes.
        $I->see('Blouse', '//*[@id="product"]/div/div/div[2]/h1');
    }
}
<file_sep><?php
namespace Page\Acceptance;
/**
 * Page Object for the search-results listing page.
 * Holds the selectors and URL used by tests that interact with results.
 */
class SearchPage
{
    /**
     * Selector for the Grid layout button in its selected state
     */
    public static $chosenGridButton = '//li[@id="grid" and @class="selected"]';
    /**
     * Selector for the List layout button
     */
    public static $listButton = '//li[@id="list"]';
    /**
     * Selector for the block containing the "Add to cart" button
     * when results are displayed as a list
     */
    public static $rightBlockContent = '//div[@class="right-block-content row"]';
    /**
     * URL of the search-results page
     */
    public static $URL = '/index.php?id_category=11&controller=category';
    /**
     * Declare UI map for this page here. CSS or XPath allowed.
     * public static $usernameField = '#username';
     * public static $formSubmitButton = "#mainForm input[type=submit]";
     */
    /**
     * Basic route example for your current URL
     * You can append any additional parameter to URL
     * and use it in tests like: Page\Edit::route('/123-post');
     */
    public static function route($param)
    {
        return static::$URL.$param;
    }
    /**
     * @var \AcceptanceTester;
     */
    protected $acceptanceTester;
    public function __construct(\AcceptanceTester $I)
    {
        $this->acceptanceTester = $I;
    }
}
<file_sep><?php
class PrintedDressSearchCest
{
    /**
     * Search for "Printed dress" and verify that exactly five product
     * cards are shown in the results.
     */
    public function searchForFiveGoods(FunctionalTester $I)
    {
        $I->amOnPage('');
        // Make sure the search box has loaded, then submit the query.
        $I->seeElement('#search_query_top');
        $I->fillField('#search_query_top', 'Printed dress');
        $I->click('#searchbox > button');
        // On the results page, count the product cards.
        $I->seeElement('#center_column > h1 > span.heading-counter');
        $I->seeNumberOfElements('.available-now', 5);
    }
}
<file_sep><?php
class PrintedDressSearchCest
{
    /**
     * Search for "Printed dress" and verify that exactly five product
     * cards are shown in the results.
     *
     * Cleanup: removed eight local selector variables that were declared
     * but never used anywhere in the method.
     */
    public function searchForFiveGoods(FunctionalTester $I)
    {
        $I->amOnPage('');
        // Make sure the search box has loaded, then submit the query.
        $I->seeElement('#search_query_top');
        $I->fillField('#search_query_top', 'Printed dress');
        $I->click('#searchbox > button');
        // On the results page, count the product cards.
        $I->seeElement('#center_column > h1 > span.heading-counter');
        $I->seeNumberOfElements('.available-now', 5);
    }
}
<file_sep><?php
use Page\Acceptance\LoginPage;
class FailedLoginCest
{
    /**
     * Verify that the error dialog can be closed after a failed login.
     */
    public function closeWindowAfterFail(AcceptanceTester $I)
    {
        $loginPage = new LoginPage($I);
        $I->amOnPage(LoginPage::$URL);
        $I->fillField(LoginPage::$loginInput, LoginPage::USERNAME);
        $I->fillField(LoginPage::$passwordInput, LoginPage::PASSWORD);
        $I->click(LoginPage::$loginButton);
        // The locked-out user cannot log in, so an error window appears.
        $I->waitForElement(LoginPage::$errorWindowCloseButton);
        $loginPage->closeErrorWindow();
        $I->dontSeeElement(LoginPage::$errorWindowCloseButton);
    }
}
<file_sep><?php
namespace Page\Acceptance;
/**
 * Page Object for the login page.
 * Holds credentials, selectors and helpers used by login tests.
 */
class LoginPage
{
    /**
     * Default username that triggers a failed login (locked-out user)
     */
    public const USERNAME = "locked_out_user";
    /**
     * Default password for the user
     */
    public const PASSWORD = "<PASSWORD>";
    /**
     * Selector for the username input field
     */
    public static $loginInput = '#user-name';
    /**
     * Selector for the password input field
     */
    public static $passwordInput = '#password';
    /**
     * Selector for the login button
     */
    public static $loginButton = '#login-button';
    /**
     * Selector for the button that closes the error window
     */
    public static $errorWindowCloseButton = '//button[@class="error-button"]';
    /**
     * URL of the login page
     */
    public static $URL = '';
    /**
     * Declare UI map for this page here. CSS or XPath allowed.
     * public static $usernameField = '#username';
     * public static $formSubmitButton = "#mainForm input[type=submit]";
     */
    /**
     * Basic route example for your current URL
     * You can append any additional parameter to URL
     * and use it in tests like: Page\Edit::route('/123-post');
     */
    public static function route($param)
    {
        return static::$URL.$param;
    }
    /**
     * @var \AcceptanceTester;
     */
    protected $acceptanceTester;
    public function __construct(\AcceptanceTester $I)
    {
        $this->acceptanceTester = $I;
    }
    /**
     * Closes the login error window
     */
    public function closeErrorWindow()
    {
        $this->acceptanceTester->click(self::$errorWindowCloseButton);
    }
}
<file_sep><?php
class BlouseSearchCest
{
    /**
     * Search for "Blouse" and verify that the product modal window
     * ("Quick view") opens and shows the matching product title.
     */
    public function searchForBlouseCest(AcceptanceTester $I)
    {
        $I->amOnPage('');
        // Wait for the search box to load, then submit the query.
        $I->waitForElement('#search_query_top');
        $I->fillField('#search_query_top', 'Blouse');
        $I->click('#searchbox > button');
        // Wait for the results page — specifically the "Quick view"
        // button, which only appears on hover — and click it.
        $I->waitForElement('.left-block');
        $I->moveMouseOver('.left-block');
        $I->waitForElement('.quick-view');
        $I->click('.quick-view');
        // Wait for the modal iframe to open.
        $I->switchToIFrame('.fancybox-iframe');
        $I->waitForElement('#product > div > div > div.pb-left-column.col-xs-12.col-sm-4.col-md-5');
        // If it really is a blouse, the test passes.
        $I->see('Blouse', '//*[@id="product"]/div/div/div[2]/h1');
    }
}
<file_sep><?php
use Page\Acceptance\MainPage;
use Page\Acceptance\SearchPage;
class LayoutCest
{
    /**
     * Switch the search-results layout from Grid to List and verify that
     * the list-only content block becomes visible.
     *
     * Cleanup: removed a block of dead commented-out code copied from
     * the login test.
     */
    public function changeLayout(AcceptanceTester $I)
    {
        // NOTE(review): $searchPage is never used afterwards; kept only
        // for the MainPage constructor call — confirm the constructor has
        // no side effects before removing it entirely.
        $searchPage = new MainPage($I);
        $I->amOnPage(MainPage::$URL);
        // Open the "Summer Dresses" category via the Dresses menu.
        $I->moveMouseOver(MainPage::$dressesButton);
        $I->waitForElement(MainPage::$summerDressesButton);
        $I->click(MainPage::$summerDressesButton);
        $I->amOnPage(SearchPage::$URL);
        // Grid must be the active layout before switching.
        $I->seeElement(SearchPage::$chosenGridButton);
        $I->click(SearchPage::$listButton);
        // The right-block content only renders in the List layout.
        $I->seeElement(SearchPage::$rightBlockContent);
    }
}
|
898e2db95aedff5343befb30e2acd14b0b3b0598
|
[
"PHP"
] | 8 |
PHP
|
JemboDev/qa-automation-pg
|
114439ff23fcd59fb185cd3a2eacd1cb9845865e
|
0c26553cd0418b6321fb5c258db57e21b3076ca8
|
refs/heads/master
|
<repo_name>scshepard/joeahand<file_sep>/content/future-books.md
Modified: 2014-03-11T17:36:50.650850
Status: published
Date: 2011-05-03T00:00:00
Slug: future-books
Title: Future of Books is in the Container
Category: note
Kind: note
I feel like the biggest driving issue with digital books is the container. Once the container gets standardized, the content and usage can become more standardized. So I guess the question is how do you become the standard container for a type of information. I would imagine the book had some sort of competition, people who bound the pages in a different way or presented ideas differently, right? It does not really sound right.
I wonder if we are too stuck on the idea of a *book*. Does a remix machine need even to have that idea. There is definitely some idea of cognitive location within an argument. Without knowing how long the argument lasts it may be difficult to focus or understand what the writer is saying. But how can we say that when have books that range from 50 pages to thousands. We have messages that are limited to 140 characters and messages that are unlimited.
When information first started to be communicated over wires a new neo-language had to be invented to convey the message. Then in order to protect privacy and save money people went further to create secret languages on top of this neo-language. That is a pretty powerful idea.
Just by having this standard means of communication people were able to be so innovative around it. Was it the standardization or the uniqueness or what? It seems like the morse-code telegraph broke so many boundaries it had to be a hot bed ripe for innovation. So how do you create that kind of atmosphere around a product.
Does it tell a story? Or connect people? Or just provide economic advantage?
Maybe our thinking is indeed too narrow. Is the portal or the container more important. I think of Google as a portal. iTunes is more of a container. Both are powerful market movers.
Google has used the portal to become a container for other types of goods. What gave them an advantage here?<file_sep>/content/first-post.md
Modified: 2014-03-11T17:29:54.034946
Status: published
Date: 2011-03-30T00:00:00
Slug: first-post
Title: First Post
Category: uncategorized
Kind: article
I struggle to comprehend how our country has changed since its founding. Reading the autobiography of <NAME> I am struck by the amount of civility that abounds in the congress. People were actually pleasant enough to argue about an issue, compromise, then create a document that everyone liked. Has the character of our people changed that dramatically?
This country lacks a solid moral compass. Our effects create the future yet we discard any ideas that take that into account. Everything is about the present. What helps me and now is not soon enough.
How can our youth understand this idea if the majority of adults refuse to accept the notion?
There is a moral struggle going on in our country. The right is scared of changes, progress, and new ideas. The left is afraid of shit I dunno- maybe how our country is spiraling down and down with every new law passed.
Our country was founded on compromise, new ideas, and a basic moral constitution. Where did those founding principles go.
Would we be able to recognize the benefit of a public hospital or library today? Could those institutions be created in this political climate? I doubt it. The public hospital first proposed required that the government match any private contributions. That would never happen in a million years. What changed?
<NAME> had an interesting line in his autobiography about people's unwillingness to change. He understood that accepting new ideas is difficult unless the situation forces it. How can we put ourselves in situations to force new ideas while remaining comfortable- or is that even possible. Perhaps it is the trickiness of situations that allow us to see situations in new light.
Franklin was a powerful man, yet he did not act in such a way. Franklin wanted everyone to accept an idea before it was put into practice. He was able to be convincing by trying to find what that person's individual interest or notion was. He then worked to tie that to the big picture.
Perhaps, we just lack that sort of big picture thinkers. Franklin was able to do a remarkable job of bringing everyone together without forcing one side to concede more than another. He wanted the largest benefit to the public in each situation.<file_sep>/content/happiness-vs-happiness.md
Modified: 2014-03-11T17:43:23.383418
Status: published
Date: 2011-07-21T00:00:00
Slug: happiness-vs-happiness
Title: Happiness VS Happiness
Category: uncategorized
Kind: article
The battle of genuine happiness and short-term happiness is a battle between beasts and humanity. Which side are you on?
The fate of mankind rests on the ability for individuals to pursue happiness. Throughout life people seek to maximize happiness and minimize suffering. But in order to succeed, we must work together.
#### Teaching Happiness

Strangely, many of us go through more than twelve years of schooling without ever taking a class on how to be happy.
Perhaps finding happiness is so easy we do not need to teach it. Or maybe we learn how to be happy in every class because of the importance of happiness. No, the responsibility must fall on parents to teaching kids how to be happy. Well surely happiness cannot be taught, it must be learned from life. Yes, that *must* be the answer, ***you learn how to be happy by living***. Therefore, every adult has the duty to show children how to live with genuine happiness.
When a parent tells a child to eat their veggies or to stop watching TV and do their homework, they do so for the child's own good. Parents force the child to forgo the short-term sensory happiness of television for their long-term happiness. The child may not agree or understand how studying will make them happy, but the adults know. Yet so many adults act like children when seeking happiness.
#### Immediate Happiness

Short-term interests plague the country. Looking at any of the problems the country faces-- economic issues, obesity, never-ending legal battles, or war-- adults everywhere pursue the quickest route to pleasure, mistaking it for happiness. Sadly, many of these actions ignore the personal and social consequences over time.
>Our tendency is to rush in and do what seems to promise the shortest route to satisfaction. But in doing so, all too frequently we deprive ourselves of the opportunity for a greater degree of fulfillment. >>**The Dalai Lama**, Ethics for the New Millennium <small>(p. 53)</small>
The harmless super-sizing of a meal, fighting for every cent of profit, or neglecting a group of people for the richer lobbyists on the other side-- all of these actions feel great right now. They feel like the path to happiness. But these acts only lead to a desire for more. More food, more money, more power. Gluttony precedes more problems. ***When our indulgence causes suffering for others is when we lose our humanity and become beasts.***
###The Humanity of Genuine Happiness
Genuine happiness. A tricky idea. Too often our desire for satisfaction leads us to satiating our brute impulses, mistaking them for happiness. But rather than seeking happiness externally, we must realize that happiness is not a thing that can be reached but an attitude, a state of being. More importantly, the right actions follow from right attitude. But, here comes the real shocker, right actions help on the path to right attitude. Showing kindness, compassion, love, or concern for others does not diminish the sense of yourself.
Whether you like it or not, you are dependent on others and others on you. This is the beauty of humanity. If we all work for personal pleasure while making others suffer, we will all suffer. But everyone can pursue happiness together and we can all enjoy a bigger slice of the pie.
>We come into the world as the result of others' actions. We survive here in dependence on others. Whether we like it or not, there is hardly a moment of our lives when we do not benefit from others' activities. For this reason, it is hardly surprising that most of our happiness arises in the context of our relationships with others. >>**The Dalai Lama**, Ethics for the New Millennium <small>(p. 62)</small>
###Act Now
Developing a sense of inner peace and the right attitude can take years or a lifetime. It can only be achieved through patience and hard work. But nothing stops you from helping others on the path to genuine happiness. The remarkable thing is, it will help you along your path too.
* ***Worry less*** about your own suffering or sadness.
* ***Show concern*** for a friend or stranger.
* Guide them towards happiness with ***compassion and kindness***.
* Realize you have just taken a step towards your own happiness.
#####Do you want to be an animal or a human?
Are you going to make your inner child eats all the veggies before it leaves the table? Or learn something rather than watch TV?<file_sep>/pelicanconf.py
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
import time
from urllib.parse import urlparse
def get_domain(url):
    """Return the domain (including any subdomain) of *url*.

    Note: every literal occurrence of ``www.`` in the host part is
    stripped, not just a leading one — this mirrors the original
    behaviour of the filter.
    """
    host = urlparse(url).netloc
    return host.replace('www.', '')
# Custom Jinja filters exposed to the theme templates.
JINJA_FILTERS = {
    'get_domain':get_domain,
}

# Build-time stamps rendered by the theme.
LAST_UPDATE = str(time.strftime('%m %Y'))
YEAR = str(time.strftime('%Y'))

# Empty SITEURL gives document-relative URLs for local development.
SITEURL = ''
AUTHOR = u'<NAME>'
# External profile / contact links rendered by the theme.
AUTHOR_LINKS = {
    'INSTAGRAM' : 'http://instagram.com/joeahand',
    'GITHUB' : 'https://github.com/joehand',
    'TWITTER' : 'http://twitter.com/joeahand/',
    # use html entities to obfuscate for spammers (http://stackoverflow.com/questions/748780/best-way-to-obfuscate-an-e-mail-address-on-a-website)
    'EMAIL' : 'joe@joeahand.com'
}
SITENAME = u'<NAME>'
SITESUBTITLE = u'Better cities with local data'
# Pages shown in the site navigation bar.
NAV_PAGES = ['about', 'cv']
THEME = 'themes/joe/'
PATH = 'content'
TIMEZONE = 'US/Mountain'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%Y-%B-%d'
# Templates generated directly (includes a custom sitemap template).
DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives','sitemap')
SITEMAP_SAVE_AS = 'sitemap.xml'
STATIC_PATHS = []
# Plugin locations and the plugins in use.
PLUGIN_PATHS = ["plugins", 'plugins/pelican-plugins']
PLUGINS = [
    'assets',
    'pelican_gdocs'
]
# PLUGIN Settings
GITHUB_USER = 'joehand'
# Published Google Sheets (CSV) pulled in by the pelican_gdocs plugin.
GDOCS = [
    {
        'name':'instagram',
        'url':'http://docs.google.com/spreadsheets/d/16KHyJyTGvOIFKTR5uUHrXKWH3kf-UiucCwXfceFet0k/pub?gid=0&single=true&output=csv'
    },
    {
        'name':'articles',
        'url':'http://docs.google.com/spreadsheets/d/1Wav1nDxtOTRm3WMLL3RI0oqApxLjBxzTcPftWsCn6x4/pub?gid=0&single=true&output=csv'
    },
    {
        'name':'fitbit_activity',
        'url':'http://docs.google.com/spreadsheets/d/1AZRyvrcm-Stk0VlWoPEHD4sxe1PTOdEpU2MejRzHB7s/pub?gid=0&single=true&output=csv'
    },
    {
        'name':'tweets',
        'url':'http://docs.google.com/spreadsheets/d/1qRuICBJWHQQ34ujTXkY8jh7obJuVJ_quLbwMrBiQFyg/pub?gid=0&single=true&output=csv'
    },
    {
        'name':'steps',
        'url':'https://docs.google.com/spreadsheets/d/1AZRyvrcm-Stk0VlWoPEHD4sxe1PTOdEpU2MejRzHB7s/pub?gid=0&single=true&output=csv'
    },
    {
        'name':'coffee',
        'url':'https://docs.google.com/spreadsheets/d/1fsaSy8HJdoTr5iUX7p-iCxUwC-TFzZxnqNzt6mMP26s/pub?gid=0&single=true&output=csv'
    },
]
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# URL layout: articles under /archive/, pages at the site root.
ARTICLE_URL = 'archive/{slug}/'
ARTICLE_SAVE_AS = 'archive/{slug}/index.html'
PAGE_URL = '{slug}/'
PAGE_SAVE_AS = '{slug}/index.html'
CATEGORY_URL = 'category/{slug}/'
CATEGORY_SAVE_AS = 'category/{slug}/index.html'
# Empty save-as values disable tag and author pages entirely.
TAG_URL = ''
TAG_SAVE_AS = ''
AUTHOR_URL = ''
AUTHOR_SAVE_AS = ''
ARCHIVES_URL = 'archive/'
ARCHIVES_SAVE_AS = 'archive/index.html'
#YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
DEFAULT_PAGINATION = 5
# Page 1 has no number suffix; later pages live under /N/.
PAGINATION_PATTERNS = (
    (1, '{base_name}/', '{base_name}/index.html'),
    (2, '{base_name}/{number}/', '{base_name}/{number}/index.html'),
)
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
#SPECIAL THEME SETTINGS
# Configuration consumed by the custom theme's home page template.
HOME_PAGE = {
    'content' : 'home',
    'count' : 0,
    'partial' : True,
    'links' : [
        ('Projects',SITEURL + '#test'),
        ('Longer Bio',SITEURL + '/about/'),
        ('Writing','http://medium.com/@joehand'),
    ]
}
COPYRIGHT_LINK = 'http://creativecommons.org/licenses/by-nc-nd/4.0/'
<file_sep>/content/pilates.md
Modified: 2014-03-11T17:48:25.396096
Status: published
Date: 2011-04-17T00:00:00
Slug: pilates
Title: Health and Pilates
Category: better human
Kind: article
###Intro to Pilates
Joseph Pilates founded Pilates to help bed-ridden patients begin injury recoveries. Bed-ridden or not, I have my share of injuries and imbalances. By focusing on moving from your powerhouse, Pilates helps teach your body proper alignment and movement using your core muscles - a powerful idea.
I first tried Pilates when my mom gifted me with a private session with her teacher. Walking into a Pilates studio is intimidating. There are a myriad of strange machines with straps and springs. There are all sorts of balls and oddly shaped equipment.
My first lesson was an awakening. I had never felt that sense of control while feeling so out of control.
####Using your powerhouse
If you had a powerhouse why would you not use it? Well, good news, you do! The powerhouse idea in Pilates establishes the base of every movement. Imagine a rectangle going from your pelvic floor to your shoulders enclosing everything in between. That is your powerhouse. Deep inside your core you have hugely strong and expansive muscles that pull everything from your pelvis, spine, and organs together. Pilates teaches you to initiate each movement with that base and then engage the isolated muscles.
Moving from the powerhouse is a powerful idea. By requiring that movement start and end in the center of the body- Pilates teaches you to move with utmost control, focus, and breath.
###Pilates for Men, the book
After some major injuries I came back to Pilates with the hope of becoming injury free. I failed then and that is why I return once again.
The goal this time is much more substantial and focused. My overriding goal is to become injury free- and stay that way. After looking through the book I understood that by making it to the end I would achieve that goal. So I set up a fake goal. I want to become an advanced Pilates and Yoga dude. In the process I hope to rid myself of injuries and gain a new sense of bodily balance.
###Dunking
In the end, I really want to dunk.
The basketball rim sits at 10 feet high. The ball either goes in or it doesn't. There is some fantasy aspect about dunking. The image of flying high to pack it down is unyielding. Always a little bit higher. You either get there, or you fail.<file_sep>/readme.md
**A dynamic static website running on Pelican (Python) with data visualizations via IFTTT/GoogleDocs.**
## Intro
This site runs on [Pelican](http://docs.getpelican.com), a Python powered static site generator. It is served on GitHub Pages, via [CloudFare](https://www.cloudflare.com/) ([with easy & free SSL](https://sheharyar.me/blog/free-ssl-for-github-pages-with-custom-domains/)).
**Homepage**: The homepage content changes regularly. The data is stored in Google Docs (via [IFTTT](http://ifttt.com), [this tutorial](http://jlord.us/blog/your-own-instagram.html), and [a custom Pelican plugin](https://github.com/joehand/joeahand/tree/master/plugins/pelican_gdocs)). Data is processed on my server (or computer). I have a cron job to generate the site daily.

**Charts**: Coffee chart is made using a `<table>`, CSS, and lots of Joe. Daily Steps chart is made with the [Chartist.js](http://gionkunz.github.io/chartist-js/index.html) library.
**Design**: The Fonts are Garamond and Open Sans, served from Google. I used [Pure CSS](http://purecss.io/layouts/) as my css foundation.
## Data Gathering
All of my data comes through Google Spreadsheets. The plugin (`pelican_gdocs`) handles all of the data (and cleans/organizes it).
1. Set Up [IFTTT](http://ifttt.com). You can send your tweets to a Google spreadsheet using [this recipe](https://ifttt.com/recipes/112226-save-your-tweets-in-a-google-spreadsheet). Check out other recipes. My coffee chart is done using the [Do Button App](https://ifttt.com/products/do/button), which adds a coffee button to my phone's homescreen.
2. Gather your spreadsheets!
3. Use plugin to clean and organize your data & jinja to format it.
## Development
Interested in using this site to make something of your own? Great! Feel free to open an issue if you have a question.
Things you'll need (not in this order):
* Python
* Pelican
* A Computer
You can copy my theme, but it will break without the GDocs plugin as well.
## Resources & Thanks
* [Pelican](http://docs.getpelican.com)
* [Pure CSS](http://purecss.io/)
* [Chartist](http://gionkunz.github.io/chartist-js/index.html)
* [JLord - Your Own Instagram](http://jlord.us/blog/your-own-instagram.html)
* [CodePen Thing](http://codepen.io/hackthevoid/pen/AIoba/)
* [Google Fonts](https://www.google.com/fonts)
* [SSL w/ GitHub Pages & Cloudfare](https://sheharyar.me/blog/free-ssl-for-github-pages-with-custom-domains/)
<file_sep>/content/new-music.md
Modified: 2014-02-09T03:46:54.881000
Status: published
Date: 2011-07-11T00:00:00
Slug: new-music
Title: Finding New Music
Category: uncategorized
Kind: article
###Enjoying the Music
I listen to music a lot. While I am not a huge music buff, I like to have stuff playing while I work, cook, clean, or just hang around my house.
Music becoming digital has led to a near infinite selection. Music listeners love this. But it also means more choices, probably [too many choices](http://www.nytimes.com/2010/02/27/your-money/27shortcuts.html). In my search for music, I would go in one of the same three directions:
* Find song I want to hear, use iTunes Genius.
* Select lots of songs I do not own using GrooveShark.
* Choose artist I like, use Pandora.
All of these services are great. But each has its own drawback. Worst of all, none of them really lead to new music. Reading the blog endorsing the power of new music encouraged me to start my search. I wanted to find something that was not time consuming, had plenty of new music, and some old favorites mixed in. After trying a variety of sources, I finally found the goldmine.
#### The Oldie-But-Goodie Sources
My usual sources for music are great. But they make it hard to find new music.
##### iTunes Genius
The limits and benefits of Genius do not take long to figure out. Great for listening to genres of music you own. Not good for finding new music.
I love the iTunes Genius button. Since they came up with it I basically stopped making playlists. That combined with the time my music library got wiped out losing all my old playlists and ratings.
I use genius when I know what I want to listen to. Let's say I go with a classic like *Crosstown Traffic* by <NAME>. iTunes genius gives me a nice selection of rock mixed with some newer pop stuff. All songs I like, definitely a good playlist. But the problem-- I need to own all the music. So then I move on to GrooveShark.
##### GrooveShark
For people who do not know about GrooveShark, I encourage you to check it out now: [GrooveShark.com](http://grooveshark.com/). Two things about GrooveShark encourage me to use it over and over again.
1. Create playlists from many many songs I do not own.
2. Find music from lists of popular songs.
I mostly use this when I know what I want to listen to. Discovering new music on the popular list is hit or miss. Otherwise you have to know which new music to search for, defeating the point.
GrooveShark also has a radio feature, but in my experience it does not compare to Pandora.
##### Pandora
Of course I have a **Jimi Hendrix** station on Pandora. And the station is great. But the music stays along the same lines as iTunes. I get some music that I do not own and perhaps a few new songs.
If I want to branch out I can start a station with a newer artist. Without a doubt, I have discovered great new music through Pandora. However the stations get stale regardless of where you start. A computer can only reach so far into new genres. I also find myself hitting skip until I find a comfortable song, defeating the whole point.
After creating about 30 Pandora stations I became frustrated and began my search.
### The Search for New Music
The above three sources of music have provided most of my music the last two years or so. I rarely reach out to find new music. When I started, I realized I had no clue where to turn.
#### Asking Friends
None of my friends really seek out new music. Lots of people do, I just do not know them. If you know the right people this will probably be a great source.
For decades new music gained popularity through the radio, so I moved there.
#### Streaming Radio
I do not own a radio, limiting me to streaming radio. Luckily most radio stations stream now.
I love advertisements as much as the next person. So, I chose to stick with public radio streams only. My dad works in public radio so he had plenty of good suggestions for me:
* KEXP
* The Current
* WXPN
* WBEZ
* NPR Music
* Lots more...
Each station offers different types of music during the day. Without knowledge of what shows were closer to the kind of music I enjoyed I became frustrated. Plenty of stations also offer streaming of individual shows that I could match with my interests. That conflicts my goal of being time efficient though.
#### Podcasts
The jump from streaming radio to podcasts was easy. Lots of stations offer their shows as podcasts. But once again I ran into the problem of too many choices. Tons of music podcasts fill the iTunes directory.
Staring at the list of top music podcasts in iTunes is a strange experience. I started listening to some from the radio stations I enjoyed more. The experience was again hit or miss. Unfortunately, somewhere in the strange mix of podcasts in the top 10, I missed *NPR's All Songs Considered.*
### The Goldmine
#### All Songs Considered
After downloading and listening to some other podcasts, I returned to the top 10 disappointed and looking for more. Finally I found it, ***NPR's All Songs Considered***: the perfect combination of new music and old favorites spiced up with great stories and music knowledge.
I am really happy with the podcast because not only do I get to hear new music, but the hosts have a vast understanding of the music. Unlike Pandora, the podcasts can jump from one genre to another while still having a common theme. And in the process of hearing music I am also learning about music-- the best of both worlds!
I have listened to about 30 of the 300 old episodes and am looking forwards to the rest. Best of all, there is a 24/7 music stream if I ever get tired of the commentary.<file_sep>/content/pages/reading.md
Template: reading
Title: Reading
slug: reading<file_sep>/content/the-habit-of-habits.md
Modified: 2014-03-11T17:41:00.855802
Status: published
Date: 2012-06-18T00:00:00
Slug: the-habit-of-habits
Title: Creating the habit of creating habits
Category: note
Kind: note
Creating a habit is a tricky thing. At first it seems so simple. You wake up and floss your teeth. Boom, done. Next day - you almost forget but then remember, teeth flossed. Then the third day you may forget or put it off until the day is done. And eventually you keep putting it off and it does not get done.
Starting a new habit is not something you can just jump into. You must slowly approach it so as to not scare it off. Then you need to coddle it, never let it leave your side for a month. After a month it starts to grow. But it can still escape if you are not careful. Every week you must reconnect with it. Remind yourself why you are building this habit and what you hope to achieve. After a couple months your habit may finally blossom.
There is a science to habits, literally. There are millions or even billions of dollars in creating habits. Companies try to get you in the habit of buying their product. Grocery stores get you in the habit of buying a treat, just this one time, in the checkout line. If you approach habits with the same careful scientific mindset, you can get in the habit of creating habits.
{# Full-post partial. Expects `post` in context with: id, kind ('article' |
   'note' | 'page'), slug, title, category, content (raw markdown), pub_date,
   and optionally link_url for link-style notes. Uses Flask-style url_for and
   the custom `markdown` / `get_domain` Jinja filters. #}
<section class="post-full" data-id="{{post.id}}">
    <header>
    {%- if post.kind == 'article' %}
        {# Articles: h1 title plus a linked category #}
        <h1 class="title"><a href="{{url_for('blog.post', slug=post.slug)}}">{{post.title}}</a></h1>
        <h5 class="category small-caps"><a href="{{url_for('.category', category=post.category)}}">{{post.category}}</a></h5>
    {%- elif post.kind == 'note' %}
        {# Notes: smaller h2 title; link notes also show their source domain #}
        <h2 class="title"><a href="{{url_for('blog.post', slug=post.slug)}}">{{post.title}}</a></h2>
        <h5 class="category small-caps"><a href="{{url_for('.category', category='note')}}">{{post.category}}</a></h5>
        {%- if post.link_url -%}
        <h5 class="link_url"><a href="{{post.link_url}}"><span class="useicons"></span><span class="link_url">{{post.link_url|get_domain}}</span></a></h5>
        {%- endif -%}
    {%- elif post.kind == 'page' -%}
        <h1 class="title"><a href="{{url_for('blog.post', slug=post.slug)}}">{{post.title}}</a></h1>
    {%- endif -%}
    </header>
    <article class="content">
        {# post.content is raw markdown; rendered then marked safe for HTML #}
        {% if post.content %}{{post.content|markdown|safe}}{% endif %}
    </article>
    {%- if post.kind != 'page' %}
    {# Pages get no footer; articles and notes show permalink + pub month #}
    <footer class="post-footer">
        <a class="small-caps" rel="bookmark" title="Permalink" href="{{url_for('blog.post', slug=post.slug)}}">
            <span class="useicons"></span>
            <span class="pub_date">{{post.pub_date.strftime('%B %Y')}}</span>
        </a>
    </footer>
    {%- endif %}
</section>
Modified: 2014-03-11T17:40:25.713342
Status: published
Date: 2011-07-22T00:00:00
Slug: cookies-happiness
Title: Cookies and Happiness
Category: uncategorized
Kind: article
***Baking cookies and finding happiness are mostly the exact same thing.***
I love baking chocolate chip cookies. For years I have worked to make my cookies better: slightly varying the amount of one ingredient or another, changing the baking time or temperature. Sometimes I fail terribly but most lead me closer to the ideal form of the chocolate chip cookie I am searching for.
Making cookies involves combining certain ingredients in a certain manner and then acting on that mixture by putting them in the oven. I really like brown sugar and chocolate chips. I also like pizza. If I decided to make cookies replacing flour with pizza and the butter and eggs with more sugar and chocolate, I will end up with a mess in my kitchen and no cookies. I can also create the perfect combination of the necessary ingredients then put them in the microwave to bake. Once again, no cookies and messy kitchen. Perfecting cookies requires perfecting the two essential parts: right ingredients and right baking.
<!-- NOTE(review): truncated image markup ('")') removed — restore the original image link here -->
Sadly humans have not always known how to make cookies. We learned, we experimented, and even still some people like one type of cookie over another or less sugar and more chocolate. With happiness we each have our own brand of happiness. But just like you cannot make cookies from pizza, you cannot make happiness from anger.
We all have a common cupboard from which we pull our ingredients for happiness. Pursuing happiness entails learning which ingredients suit you and acting on those ingredients correctly. ***Right mind and right action.***
>The first step in seeking happiness is learning. We first have to learn how negative emotions and behaviors are harmful to us and how positive emotions are helpful.
>>**The Dalai Lama**, The Art of Happiness <small>(p 38)</small>
###The Power of Our Mind
In my definitive guide on [the happiest place on Earth]({{ site.production_url }}/2011/happiest-place/) I told you about your superpower. Humans have the amazing power to change how their brain works and control our own minds. Pretty fascinating really, if you like that neuroscience stuff. The process of learning builds new neural pathways within our brain. In English, that means when we do something a lot our brain gets more efficient working in that way.
Think about the brain as a network of roads of all sizes and shapes. Our habits, the things we are really good at, use the superhighways that we build over time by doing that action again and again. Meanwhile, if we go to learn a new foreign language we start the process of creating a new road from scratch-- or perhaps you know a few words and have a trail through the woods.
####The Highways of Happiness
The first step in discovering happiness is learning. We have taught our brain throughout life to respond in certain ways. If we regularly get angry or upset-- that is the highway in our brain. Meanwhile, the highway of compassion or kindness may be non-existent. Without building the habits of happiness producing thoughts, we cannot expect to be happy.
In *The Art of Happiness*, The Dalai Lama describes his reaction to tragic or sad events as causing a "ripple on the surface of an ocean" which does not effect him deep down. He has a happiness super-highway from a lifetime of training. But if you have a disposition towards anger, an event that upsets you skips the happy highway. Instead your thoughts opt for the anger highway because it is the path of least resistance. What could be small ripples turns into bigger and bigger waves the further it proceeds down that road.
We must habituate ourselves to create new mental pathways while destroying the old edging us ever closer to the right attitude and therefore happiness.
>Happiness depends more on the inward disposition of mind than on outward circumstances. >>**<NAME>**
###Happiness as a Math Problem
The mathematical beauty of baking attracts me to making cookies. I love to understand how certain ingredients combine with others. I learned how baking soda or baking powder create different reactions, how a yolk or egg whites change the cookies. Then comes the ultimate problem of finding the correct dough size, oven temperature, and baking time.
Developing the right attitude of happiness works the same way. With every action or thought we have the chance to either lead us closer or move away from our goal. When learning how to be happy, we must realize two things:
* Positive thoughts and actions generate more positive thoughts and actions.
* Negative thoughts and actions propagate to make us angrier, which leads to more negative actions.
###Act Now
For every negative thought you discard and turn into a positive, they do not simply cancel out. Wake up the day and start with a positive thought rather than "ugh morning." This one thought will build on itself turning the day around. Looking at our attitude in relation to others we see the true power of the math of happiness.
Just like in our own minds, negative emotions hurt others and cause more waves of sadness throughout the world. Whereas right mind, right action spreads more happiness.
***So which side are you on? Do you want to make your family, your community, and the world more miserable? Or do you want to propagate joy?***
###[Bake some cookies]( {{ site.production_url }}/cookies/) and spread happiness.<file_sep>/content/clean-slate.md
Modified: 2014-03-11T17:32:29.099478
Status: published
Date: 2011-07-08T00:00:00
Slug: clean-slate
Title: Starting with A Clean Slate
Category: note
Kind: note
Washing away the old and preparing for the new. Before on this page there were a couple blog posts, but nothing worth talking about. Seth Godin [posted today](http://sethgodin.typepad.com/seths_blog/2011/07/put-your-name-on-it-1.html) about *putting your name on it*:
>"If you can't sign it, don't ship it."
The things I wrote before were not worth signing. This decreased my motivation to write new posts. So here is to starting fresh--creating something where I will proudly display my seal of approval.
###Riding the wave.
This fresh start comes on the wave of another new project. Two weeks ago I began a project at [SmilingJoe.com](http://smilingjoe.com) called **Experiments in Kindness**. I feel a strange force pulling me forwards. Bringing me towards new opportunities, encouraging me to explore new ways of thinking. It is exciting yet scary. I only hope I can stay above the surf.
But getting knocked down may be a better experience than riding the wave itself. So then I look forwards to whatever lies ahead.
>"Failure is the opportunity to begin again more intelligently." > >***<NAME>***<file_sep>/content/happiest-place.md
Modified: 2014-03-11T17:34:41.972546
Status: published
Date: 2011-07-19T00:00:00
Slug: happiest-place
Title: The Only Place In The World Where You Can Find Happiness
Category: uncategorized
Kind: article
***Welcome to the most definitive guide ever about the happiest place on Earth. Inside I offer a simple three step fix to finally reach the ultimate goal in life.***
After reading through top ten list after top ten list, a book about the happiest countries on Earth, surveys about happiness, and even visiting some happy places, I finally found it, <strong>the happiest place in the World</strong>.
Searching Google for ***happiest place in world*** yields almost three million results. Millions of top ten lists fight to reach the top ten spots on the first search page. A self-proclaimed grump, Eric Weiner, even took it upon himself to fly around the world searching for the world's happiest place in the book *[The Geography of Bliss](http://www.amazon.com/gp/product/044669889X/ref=as_li_qf_sp_asin_tl?ie=UTF8&tag=smijoe-20&linkCode=as2&camp=217145&creative=399369&creativeASIN=044669889X)*.
Nine different countries and one happy US city later he concludes the destination of happiness can be reached through many paths:

>Money matters, but less than we think and not in the way that we think. Family is important. So are friends. Envy is toxic. So is excessive thinking. Beaches are optional. Trust is not. Neither is gratitude.
With so many different ways to happiness how can you know what path is right for you? Perhaps it is not about the journey but the people you meet along the way. The author reconsiders one of the people he met on his journey and decides "happiness is completely and utterly intertwined with other people... happiness is not a noun or verb. It's a conjunction. Connective tissue." So it must be more about the people than the actual place. Now we are left to wonder how to find these relationships that make us happy.
Even after trotting the globe trying to discover happiness he ends his journey "not 100 percent happy. Closer to feevty-feevty." Unfortunately, no one informed him of the true, happiest place on earth. You cannot find it on any of the top ten lists on Google. And no, it is not Disney World.
###The Way to Happiness
I offer you a simple three step process for the way to happiness:
1. Buy a plane ticket to Denmark.
2. Pack your stuff, fly there.
3. Arrive in world's happiest place, be happy.
Foolproof right? Year after year Denmark ranks as the "[world's happiest nation](http://news.bbc.co.uk/2/hi/7487143.stm)." So anyone who goes there must be happy. It's the rules.
However, you cannot suddenly become happy by just arriving in Denmark and any fool knows this. **To find true happiness, it must come from within**. Sure you can enjoy a beautiful place, wonderful food, or a great concert. But unless you have the right attitude, nothing external will make you happy.
>There is no way to happiness. Happiness is the way. >>**<NAME>**
###Cultivating Happiness
I talked briefly at the end of my last post about planting the seed for this attitude. First you must be aware of what you can and cannot control. Things happen. Great things, terrible things. Unless you are a wizard or have a time machine you are powerless to change the past. But you still have a great power within you.
You have the power to control your thoughts. Every event summons a reaction within your mind. Start to nurture the attitude that you have the superpower of authority over your mind. Be attentive to your immediate response to any external event. Here is your homework assignment:
* Acknowledge your knee-jerk thoughts, becoming aware of them as they develop.
* Remember that you have a superpower. With that power you can control your mind.
* Finally, let the thoughts pass by, as if they were a small cloud in an otherwise blue sky.
#####Keep the Sky Blue
*Cultivate a blue sky in your mind*. Actively strive to keep that sky clear of nasty thoughts and intentions-- ***for that is the only place in the world where you can find happiness***. See the clouds, say hello if you like, then watch as they drift out of sight.
####Find Your Smile, Find Happiness
On the author's final leg of his journey, he recounts a serendipitous meeting with the airport bartender. The bartender's name, of course, "Happy." Without hesitation the author asks him what the secret of happiness is. His answer, simple:

>"Just keep on smiling. Even when you're sad. Keep on smiling."
Title: Building a Dynamic yet Static Website with Pelican & IFTTT
Slug: dynamic-site-pelican-ifttt
Date: 2015-07-31 08:58:30
Tags: pelican, python, ifttt
Category: programming
Summary:
Static websites seem all the rage right now, and for good reason. A static site is a website that does not have a server creating pages on every request. They are easy to host (for free), fast, and help you offload all the server's work onto the computers visiting your site (d'oh!).
Static websites also have some serious downsides. Foremost, the content is static - only updated when you take action. If you want *dynamic* content on the site, you have to rely on APIs and Javascript, adding to the page size and load times. In this trade off, you are passing the work on to the user rather than doing it on the server, perhaps an unwise trade off.
Don't panic! You can have both! If you visit my [homepage](https://joeahand.com) you will notice dynamic content (twitters, fitbit steps, cups of joe drunk) - yet there are no API calls. How is this possible? It must be Magic! Not here, sorry.
The answer is Python, of course. You can [fly with Python](https://xkcd.com/353/) and make dynamic-static websites! *(You can build this with other languages, but you can't fly with them... In fact, I am flying as we speak! Probably thanks to Python.)*
### Quickstart (how I did it):
1. Set up [IFTTT](https://ifttt.com) (If This Then That) and start sending data to Google Spreadsheets ([sample recipe](https://ifttt.com/recipes/112226-save-your-tweets-in-a-google-spreadsheet)).
2. Grab the data with a custom [Pelican](http://getpelican.com) [plugin](https://github.com/joehand/joeahand/tree/master/plugins/pelican_gdocs), process it, and output to templates.
3. Make the data pretty.
4. Create a cron job to update data & regenerate site regularly (mine runs once daily).
## 1. IFTTT Awesomeness
If you haven't checked it out, IFTTT is great. When something happens (new tweet posted) IFTTT will do something else (add a row to a spreadsheet). I started using it to automatically send emails to loved ones when I arrived at airports (via check ins on FourSquare).
IFTTT also has a group of apps called DO (Button, Note, Camera). These apps skip the IF and just DO something. When I have a cup of joe, I tap the DO button on my phone to record it (unfortunately, as I just found out, this doesn't work on airplanes - remind me to record this cup of coffee I drank over Idaho).
All of my homepage data goes through IFTTT to Google spreadsheets:
* Drink Coffee > Tap Do Button > Add to Spreadsheet
* Joe Tweets > Add to Spreadsheet
* Pocket Article Archived > Add to Spreadsheet
* Instagram Picture Posted > Add to Spreadsheet
* Fitbit Daily Log > Add to Spreadsheet
Now think of the possibilities with all that data in spreadsheets! For now, let's make it into a nice dynamic-static website.
## 2. Grabbing Spreadsheet Data w/ Pelican Plugin
***Advisory:*** *We are going to start getting into some code here. I'm going to assume that you have a running [Pelican](http://getpelican.com) website. If not, check out the [Pelican getting started](http://docs.getpelican.com/en/3.6.2/quickstart.html) tutorial, it's pretty friendly.*
Once the data is in Google Docs, how do we get it out? Turns out, its quite simple. Google has an option to publish spreadsheets as csv files:
File > Publish to Web > Publish as CSV
Now that you have a public csv, we can grab it with the great Python `requests` library, transform it into a Python object. This will all be a part of our [Pelican plugin](http://docs.getpelican.com/en/latest/plugins.html). My plugin is pretty customized (for processing the data) but you can copy the guts of it.
***Note:*** *You can hop over to [GitHub](https://github.com/joehand/joeahand) to check out the full code at any time (here is [plugin part](https://github.com/joehand/joeahand/tree/master/plugins/pelican_gdocs)).*
### Getting a Published CSV from Google
:::python
import requests
response = requests.get(PUBLIC_CSV_URL)
content = response.text
And now you have a csv in `content`. Yes, it's really that easy.
### Creating Python List from CSV
Unfortunately, right now the `content` variable is just a big string of data, so it is not very useful.
Now we will create a Python list, with each item in the list being a row of the spreadsheet. Our keys will be the spreadsheet headers.
***Note:*** *This code is for Python 3. If you aren't on 3, you should really switch.*
:::python
import csv
lines = content.splitlines()
header = [h.strip() for h in lines[0].split(',')]
data = list(csv.DictReader(lines[1:], fieldnames=header))
Python's `csv` module makes this pretty simple. Each item in the data list is now a Python object, for example this is a row from the Instagram spreadsheet:
{
'Instagram_Link':'http://instagram/something',
'Photo_URL':'http://instagramcdn.com/url',
'Caption': 'This is a picture',
'Date':'June 21, 2015'
}
Captain, we have data (I'm writing this on a plane, I apologize)!
## 3. Visualizing Data
From here, what you want to do will depend on your data and how you want to show it. The plugin will allow you to access the data in your templates and then you can let your imagination run wild!
Using Jinja, outputting to a javascript object is fairly easy. For example, here is the data output for my Fitbit steps:
var steps_data = {
labels: [
{%- for row in gdocs_data.steps | reverse -%}
'{{row.Date}}',
{%- endfor -%}
],
series: [
[
{%- for row in gdocs_data.steps | reverse -%}
{{row.TotalSteps}},
{%- endfor -%}
]
]
};
The label/series organization is how Chartist.js likes data. But you can make it whatever kind of js object you like.
My coffee visualization uses a table created with Jinja loops. There are probably some other options depending on what kind of data you want to visualize.
## 4. Automatically Updating the Site
One of the biggest downsides to static sites is the fact they only update when you change the content (and then usually push the site somehow). To get around this, you need a process to run automatically on a schedule and update the website. This is exactly what [cron jobs](http://crontab.org/) are for.
Pelican also makes it fairly easy to push content with a single command (check out the make script). For my site, I use `make github` which grabs all the latest data, rebuilds the site, and pushes it to Github Pages. A similar process should work for the other `make` commands.
In my case, here is what I did:
1. Set up a [Digital Ocean server](https://www.digitalocean.com/?refcode=94657bdeab0c).
2. Clone my pelican repository.
3. Set up the `make github` command and make sure it all works manually.
4. Create a cron job to run daily (at midnight, if you must really know).
5. Profit!!?!!
The hardest part is step 3 and really depends on your server. I used a Python virtual environment but that didn't play very well with the PIL library. Safe to say, it was a pain and I recommend you not do what I did. In the case I go through this again, which is likely, I'll do a better job of documenting my steps the next time (sorry!).
## Boom - Static Dynamic Site!
So there you have it! A static dynamic site with Python!
Let me know what you think on [Twitter](https://twitter.com/joeahand). If you want to see all the code it is on [GitHub](https://github.com/joehand/joeahand).
<file_sep>/themes/joe/templates/_base.html
<!DOCTYPE html>
<html>
<head>
<script data-cfasync="false" type="text/javascript">
var host = '{{ SITEURL | replace('https://', '') }}';
if ((host == window.location.host) && (window.location.protocol != "https:"))
window.location.protocol = "https";
</script>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta http-equiv="ClearType" content="true">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- DNS prefetch -->
<link rel="dns-prefetch" href="//cdnjs.cloudflare.com">
<link rel="dns-prefetch" href="//cdn.joeahand.com">
<title>{% block title %}{{ SITENAME }} {% if SITESUBTITLE %}| {{ SITESUBTITLE }}{% endif %}{% endblock %}</title>
<meta name="description" content="{{ SITESUBTITLE }}">
<!-- Styles -->
{% assets filters="pyscss,cssmin", depends=('**/*.scss'), output="main.min.css", "scss/main.scss" %}
<link rel="stylesheet" href="{{ SITEURL | replace('https://', '//') }}/{{ ASSET_URL }}">
{% endassets %}
<!-- WebFonts -->
<script data-cfasync="false" src="//cdnjs.cloudflare.com/ajax/libs/webfont/1.6.4/webfontloader.js"></script>
<script data-cfasync="false" >
WebFont.load({
google: {
families: [ 'EB+Garamond::latin', 'Open+Sans:400italic,400,700,300:latin' , 'Roboto+Mono::latin' ]
}
});
</script>
{% block head_scripts %}
{% endblock %}
{% if FEED_ALL_ATOM %}
<link href="{{ FEED_DOMAIN }}/{{ FEED_ALL_ATOM }}" type="application/atom+xml" rel="alternate" title="{{ SITENAME }} Full Atom Feed" />
{% endif %}
{% if FEED_ALL_RSS %}
<link href="{{ FEED_DOMAIN }}/{{ FEED_ALL_RSS }}" type="application/rss+xml" rel="alternate" title="{{ SITENAME }} Full RSS Feed" />
{% endif %}
{% if FEED_ATOM %}
<link href="{{ FEED_DOMAIN }}/{{ FEED_ATOM }}" type="application/atom+xml" rel="alternate" title="{{ SITENAME }} Atom Feed" />
{% endif %}
{% if FEED_RSS %}
<link href="{{ FEED_DOMAIN }}/{{ FEED_RSS }}" type="application/rss+xml" rel="alternate" title="{{ SITENAME }} RSS Feed" />
{% endif %}
{% if CATEGORY_FEED_ATOM and category %}
<link href="{{ FEED_DOMAIN }}/{{ CATEGORY_FEED_ATOM|format(category.slug) }}" type="application/atom+xml" rel="alternate" title="{{ SITENAME }} Categories Atom Feed" />
{% endif %}
{% if CATEGORY_FEED_RSS and category %}
<link href="{{ FEED_DOMAIN }}/{{ CATEGORY_FEED_RSS|format(category.slug) }}" type="application/rss+xml" rel="alternate" title="{{ SITENAME }} Categories RSS Feed" />
{% endif %}
{% if TAG_FEED_ATOM and tag %}
<link href="{{ FEED_DOMAIN }}/{{ TAG_FEED_ATOM|format(tag.slug) }}" type="application/atom+xml" rel="alternate" title="{{ SITENAME }} Tags Atom Feed" />
{% endif %}
{% if TAG_FEED_RSS and tag %}
<link href="{{ FEED_DOMAIN }}/{{ TAG_FEED_RSS|format(tag.slug) }}" type="application/rss+xml" rel="alternate" title="{{ SITENAME }} Tags RSS Feed" />
{% endif %}
</head>
<body class="{% block body_class %}{% endblock %}">
<div role="document" class="document">
{%- block siteheader -%}
<div class="header">
<div class="site-header pure-menu pure-menu-horizontal pure-menu-fixed">
<a class="pure-menu-heading small-caps" href="{{ SITEURL }}">{{ SITENAME }}</a>
{%- include "partials/_nav.html" %}
</div>
</div>
{% endblock siteheader -%}
{%- block main %}
<main role="main" class="main pure-g">
<div class="pure-u-2-24 pure-u-lg-1-5"></div>
<div class="page-content pure-u-20-24 pure-u-lg-3-5">
{%- block content -%}
{%- endblock content %}
</div> <!-- .page-content -->
<div class="pure-u-2-24 pure-u-lg-1-5"></div>
</main> <!-- main -->
{%- endblock main %}
{% block sitefooter %}
<footer role="contentinfo" class="site-footer l-box pure-g is-center">
<div class="pure-u-2-5 pure-u-md-1-3">
<span class="copyright">
{{AUTHOR}} ·
{% if COPYRIGHT_LINK %}
<a href="{{COPYRIGHT_LINK}}">© {{ YEAR }}</a>
{% else %}
<span>© {{ YEAR }}</span>
{% endif %}
</span>
</div>
<div class="pure-u-1-5 pure-u-md-1-3">{% include "partials/_social.html" %}</div>
<div class="pure-u-2-5 pure-u-md-1-3">
<!-- <span>
{% if ARCHIVES_URL %}
<a href="{{ SITEURL }}/{{ARCHIVES_URL}}">Archive</a>
{% elif ARCHIVES_SAVE_AS %}
<a href="{{ SITEURL }}/{{ARCHIVES_SAVE_AS}}">Archive</a>
{% endif %}
{% if FEED_DOMAIN and FEED_ALL_ATOM %}
· <a href="{{ FEED_DOMAIN }}/{{ FEED_ALL_ATOM }}">RSS</a>
{% endif %}
</span>
· -->
<span>
<a href="{{ SITEURL }}/about/#about-site">Made By <span class="useicons"></span></a>
</span>
</div>
</footer>
{%- endblock sitefooter %}
</div> <!-- document -->
<script src="//cdnjs.cloudflare.com/ajax/libs/chartist/0.9.1/chartist.min.js"></script>
<script data-no-instant src="//cdnjs.cloudflare.com/ajax/libs/instantclick/3.0.1/instantclick.min.js"></script>
{%- block footer_scripts %}
{%- endblock footer_scripts %}
{% include 'partials/_analytics.html' %}
</body>
</html>
<file_sep>/content/routine.md
Modified: 2014-02-09T03:43:10.299000
Status: published
Date: 2012-07-14T00:00:00
Slug: routine
Title: Bookend The Day With Routines
Category: better human
Kind: article
Lately I have been honing in on my routines. This started with the idea of focusing on one habit each month. The first habit I developed was doing a daily morning walk. Since starting, I have habitually gone for a walk each and every morning. And I love it!
### Building Habits
I have been struggling with my habit this month. I am trying to meditate every day, and I have failed more often than not. [This post](http://artofmanliness.com/2011/09/05/bookend-your-day-the-power-of-morning-and-evening-routines/) on The Art of Manliness made me think about developing my morning and evening routines to build my habit each month.
### My Routines
Thinking through my routines today, I created three lists:
* Ideal Morning and Evening Routine
* First Step Morning and Evening Routine
* Current Morning and Evening Routine
Moving backwards made me realize how far I have to go to reach my ideal routines! Right now my evening routine is nonexistent. So, I have some work to do.
### Baby Steps
When building habits, taking small steps is the only way to success. I also created some **rules** for the morning and evening I have to stick by, regardless of where I am in my routine. This will help me address worries I had (feeling rushed meditating, looking at internet too long at night, etc.).
#### Morning Routine
My morning routine will look like this starting tomorrow:
* Walk
* Write [750 words](http://750words.com)
* Eat Real Food (Meat and Veggies) & Clean Dishes
* ***Meditate***
* Teeth Care (Floss/Brush)
* Dress, Etc.
The only new thing here is meditation. I am also switching the order of eating and writing my 750 words. I have found it a bit hard to write after eating; I seem to get easily distracted by the internet during eating and that delays my writing.
##### Morning Rules:
1. No internet until 750 words are written.
2. Clean dishes before meditation.
3. Start meditation by 7:30am.
4. Determine topic for evening writing (during 750 words).
My morning routine has been pretty stable over the last year. Whenever I add stuff in (morning walk), it seems pretty easy for me to get it done. I have a lot of willpower in the morning!
#### Evening Routine
On the other hand, I struggle with my evening routine. Between having a varied schedule for dinner, cooking, crossfit, or going out and not being sure what I want to do - I end up spending a lot of time just surfing the internet.
I hope by establishing a list of things I **must** do and times for when things need to be done, I can do a better job of this. This list is going to be much shorter because I am just jumping into the whole evening routine thing. Baby steps!
* Clean up dishes after dinner
* ***Focused writing on a single topic***
* Review goals, habits, day, life
* Read
The focused writing is something new. I always *intend* to write a blog at night, but find it hard to figure out the topic and then focus. That is where morning rule #4 comes in!
The other items in this list are things I have done with some regularity. But I hope to do them daily now (off and on...).
##### Evening Rules:
1. No electronics after 8pm.
2. Start review by 8:30pm.
3. Clean all dishes, put away clothes, etc, before reviewing goals.
Thats it! I have a ways to go to reach my ideal routine. For now, one step is enough.<file_sep>/content/poke-the-box.md
Modified: 2014-03-11T17:27:30.652084
Status: published
Date: 2011-04-16T00:00:00
Slug: poke-the-box
Title: Poking the Box
Category: uncategorized
Kind: article
I have been struggling to start. When I do start, I lack the patience to finish. This blog for example: I excitedly created the site, wrote the first post, and then waited, but nothing happened. For some reason the blog did not blossom like I had imagined.
<NAME> knew how to start his writing career. At thirteen, Franklin started by rewriting other's work. He never stopped. Shortly after he was submitting letters to the local paper under a fake name. Throughout his life he pushed new ideas forward. <NAME> initiated, just look at the list: successful printing shop, library, volunteer firehouses, safer fireplaces, bifocals, electricity. By poking the box, he became one of the most influential people of his era.
#### Poke the box
In his book, <NAME> stresses starting and shipping - a blog post, a worldwide revolution, or helping a stranger unprompted - ship something new however large or small. The default, the safe thing to do is nothing. If you create enough then you will fail. Leading the reader by the hand and acknowledging all bad that may come, the book forces the reader to take action. Without failing there cannot be success.
Throughout reading the idea of creative destruction was in my mind. <NAME> proposed entrepreneurs drove the growth of the economy through creative destruction. Entrepreneurs are driven to markets where monopolies are coasting on old technological innovation. All of the new entrants attempt to create a more efficient technology. One successful innovation topples the old monopoly while creating a new one. The growth of the economy depends on the number and success rate of the entrepreneurs. With barriers to innovation at a low, this idea is more powerful than ever.
### Creative Destruction of the self
Creative destruction has always described the economy, but what about a person? When I lift weights- that is creative destruction- I am destroying my muscle fibers to encourage them to grow back stronger. This is what Seth proposes.
#### Create a new you: start
This week I made the goal of getting 100% injury free. Using my awesome book, *The Complete Book of **Pilates for Men***, I planned my first mini-goal. I scoped out the *advanced reformer on the mat workout* and set it in my sights. About half the exercises looked impossible, but not quite.
##### Start small
I started with a simple 20 minute workout every morning, the *beginner mat workout*. A huge red X went on my calendar after each completed workout, the Jerry Seinfeld method. One week later I feel a bit stronger. But I really need to push the box.
#### Deliver on that new you: ship
Creating a plan and starting is good. The real goodness comes from shipping. In one month my calendar may be full of red- what then? Well, I will have a stronger core, maybe I will love Pilates or hate it. Cool, that does not sound so bad. In one year my calendar may be full of red- what then? My body will be stronger than ever. Even a third of the way through the book, my body moving in a way that was once impossible. Down the road, when I am 80, I may even be doing the stuff <NAME> did.
#### Failure or success is irrelevant if you progress
In committing to do Pilates for thirty days I know I will push the box. So what if the Pilates space in my box never expands again. It has been pushed, it was not worth it, my box expanded and I moved to push elsewhere- maybe yoga. Then the process starts all over again. Always moving outwards.
I imagine myself testing to optimize my personal self. The goal of my life is to expand. Right now, I need to find where there are maximum returns, just as the entrepreneurs do in the creative destruction economy.
The markets where monopolies sit tantalize entrepreneurs. If they succeed in overthrowing the monopoly the rewards are great. They become the new monopoly. Markets where profits are small or levels of technology superior provide no motivation.
##### Where is your personal monopoly?
Part of you is stuck in routine. It refuses to change. You act the same way in certain situations, always acknowledge people in the same manner, or pick the same thing every time. Just once try act differently, see how it feels, let it soak in. Rinse and repeat.
In the economy one entrepreneur succeeds because so many others fail. If it was one entrepreneur vs one monopoly, I'd put my money on the monopoly. One million entrepreneurs and it is a different story. That is why the value comes from poking the box again and again not doing it once.
### Everyone poke together now
It is scary to think about a world where every person is as determined as <NAME> was. Scary because of the amount of astonishing things that could be done. While each person has their own box to push, it is not all that gets pushed.
#### One box growing encourages another
One day someone in the neighborhood decides to push; she smiles and says hello to everyone passing- a stranger smiles back. She watches his box expand as he walks away. A smile is one thing, but <NAME> reached for much more than just kindness towards strangers. At first he pushed to expand his own box, then that of the local paper, he did not stop until he made a huge dent in all of mankind.
With each small poke the boxes that enclose us grow; first the box of your house, then your neighborhood, and if you poke hard enough eventually you will push the box of all humanity further.<file_sep>/content/defaulting-reality.md
Modified: 2014-03-11T17:43:18.878396
Status: published
Date: 2011-07-20T00:00:00
Slug: defaulting-reality
Title: Dangers of Reaching The National Bullshit Ceiling
Category: uncategorized
Kind: article
The nation is reaching the <strong>National Bullshit Ceiling</strong>. Young generations are becoming completely dismissive of politics leaving a nation without leaders. Without bold truth-telling action, the nation will "default on reality". But that would just be the beginning of our problems. Future generations may become completely dismissive of politics leaving a nation without leaders.
The Daily Show recently discussed the National Bullshit Ceiling and the threat it poses to the country. Daily Show correspondent, <NAME>, warned us of the impending doom if we pass the limit:
>If we reach the point where the amount of bullshit exceeds the amount of things we will effectively default on reality.
We already see how this political culture affects our current environment. Our representatives argue with false pretenses. We disregard the truth for the sake of profit or ideology. Individuals now get identified by their parties not what they represent. However, the real harm will be passed on to future generations.
The true damage of the current political culture comes in the form of the dissatisfaction and ambivalence of young voters, which I myself am a part of. I find myself increasingly discouraged by politics. I do not see significant progress being made and am especially frustrated by how we value short-term rewards over long-term investments. I will be the first to admit, I am as guilty in my apathy as any other young voter. I have only been motivated to vote in one of the six elections I could have voted in. Take a guess which one (hint: it involved the promise of change).
In the short-term, young generations will be poorly represented and discriminated against in policies. But long-term effects will be catastrophic. ___As a generation we are being raised with a distrust towards politicians___. We see a lack of ethics in our leaders and develop a pessimistic attitude, sometimes completely dismissing anything political. What will happen when the young become the old and the country needs people to run it? Who should we look to as role models?
I started this post with the idea of motivating myself and other young voters to care about politics. As I continue in my exploration, I keep wondering, why should we care? The future is a long ways away, yet many of the politicians do not think beyond the past. How could they possibly help us? But it's we who need to help them.
### The Beasts of Politics
>Politics devoid of ethics does not further human welfare, and life without morality reduces humans to the level of beasts. This leads some of us to refrain from politics altogether, but politics is not axiomatically dirty. Rather, the misguided instruments of our political culture have distorted our high ideals and noble aspirations. >>__The Dalai Lama__, How to See Yourself as You Really Are
Our political culture is one of beasts. There is no doubting that. But by refusing to engage our leaders, we are just as guilty of driving our country towards failure. We need to take initiative and hold our leaders accountable. We need to set an example of truth telling and living with integrity. Every interaction, every discussion, every decision we make either compounds the problems or helps to solve them.
<a title='By <NAME> [Public domain], via Wikimedia Commons' href='http://commons.wikimedia.org/wiki/File:Benjamin_Franklin_-_Join_or_Die.jpg'><img width='500' style="margin:0 auto; display:block;" alt='<NAME> - Join or Die' src='http://upload.wikimedia.org/wikipedia/commons/thumb/9/9c/Benjamin_Franklin_-_Join_or_Die.jpg/500px-Benjamin_Franklin_-_Join_or_Die.jpg'/></a>
#### Take Initiative, Stop the Bullshit
In the Daily Show clip, <NAME> called for action. He proposed a solution of "replacing shit with FARTS from our BUTTS." Beyond the obvious comical genius here, the Daily Show team was spot on calling for:
<ul class="no-bullet" style="position:absolute; margin-left:200px; margin-top:0px; "> <li><b class="big">B</b>old</li> <li><b class="big">U</b>ncompromising</li> <li><b class="big">T</b>ruth</li> <li><b class="big">T</b>elling</li> </ul>
<ul class="no-bullet"> <li><b class="big">F</b>actual</li> <li><b class="big">A</b>ssessments of</li> <li><b class="big">R</b>eality based</li> <li><b class="big">T</b>ruths</li> </ul>
We do not just need this from our politicians, we need this to permeate throughout our society. Our apathy encourages this culture to strengthen and infect more of our citizens and representatives. Start a revolution by having the courage to stand up and demand an end to bullshit.
##### Act with integrity, act boldly, challenge the bullshit.
The full Daily Show clip for those who need some laughs:
<object width="512" height="288" class="add-bottom" style="margin:0 auto; display:block;"><param name="movie" value="http://www.hulu.com/embed/3g4uGkZQ-7cKvGSD4KVFyw"></param><param name="allowFullScreen" value="true"></param><embed src="http://www.hulu.com/embed/3g4uGkZQ-7cKvGSD4KVFyw" type="application/x-shockwave-flash" width="512" height="288" allowFullScreen="true"></embed></object><file_sep>/content/find-smile.md
Modified: 2014-03-11T17:38:45.836433
Status: published
Date: 2011-07-23T00:00:00
Slug: find-smile
Title: Finding Your Smile
Category: uncategorized
Kind: article
***Find your smile just like you find your voice. Start by smiling a lot.***
####Me and Writing
Throughout school I never enjoyed writing. Everything always felt forced as I tried to conform to standards. I ended up writing with someone else's voice leading to terrible papers.
After writing the experiments in kindness and some personal blog posts, I realized how much I enjoyed writing -- on my terms. But I still felt I needed to work on getting my ideas down with clarity and ease. Any writer will tell you that in order to learn how to write, you write, a lot. So, six days ago I made it a goal to post here everyday for thirty days.
####Serendipity
Five posts later and I am making progress, but sometimes only with great effort and frustration. I still feel like I am ***finding my voice***. As I began writing today I struggled to start. I headed over to ***[ZenHabits](http://zenhabits.net)*** by [<NAME>](http://leobabauta.com), my favorite source for motivation. Lo and behold, a post on [finding your voice](http://zenhabits.net/voice/).
The first tip as expected, ***Write a lot***. Check. The second tip: >**Experiment boldly**. Rip off the greats, and the goods as well. Mimic and make it your own. Try and err.
I kept reading and within moments I began seeing how finding your voice parallels finding your smile. I decided to "rip off the greats... mimic and make it my own." And thanks to ZenHabits being in the [public domain](http://zenhabits.net/open-source-blogging-feel-free-to-steal-my-content/) I could do so easily.
Thank you Leo for all the great posts and inspiration. If I could only read one blog the rest of my life, I would choose ZenHabits. I encourage everyone to check out [ZenHabits](http://zenhabits.net). ***The remainder of this post was stolen from there and modified without regret.*** I lay claim to none of the hard work and words you will find below.
### Finding Your <strike>Voice</strike> Smile
Humans of any kind must find their smile.
We are writers, musicians, designers, programmers, parents, builders of anything. But we are not truly at peace with ourselves, and genuinely happy, until we've found our smile: the attitude, kindness, compassion, love, personality we use to express ourselves.
Our smile is our essence, plain for the world to see.
A fellow happiness seeker asked me how I found my smile. And I have no easy answer -- I'm not even sure I can say I've fully found my smile yet. It's a quest that doesn't seem to end -- not a Grail quest, really, but a constant retuning as the essence of who I am neverendingly changes.
But I feel I've found something that has the texture of truth, even if only a tactile approximation. I'll share some of my thoughts, but keep in mind I don't hold the answers firmly at all.
I'm learning, and I hope my learning helps yours. This is written for smile seekers, but the ideas are the same for anyone who is human.
**Smile a lot.** This is almost all I need to say, as nothing else matters without the constant practice of smiling a lot. Try smiling right when you wake up, when you are washing dishes, smile to strangers, find your enemies and smile with them, smile at nature, and smile for yourself. The sheer mass of your smiles becomes the raw matter from which to chisel your happiness.
**Experiment boldly.** Rip off the greats, and the goods as well. Mimic and make it your own. Try and err.
**Learn to be at peace with yourself.** My smile is really the happiness in my head. It's not how I assert my happiness, but how I feel my genuine happiness, in the noisy cavern of my skull. I listen to my inner peace and that's the smile I try to emulate.
Getting that happiness from your head to your face -- that's the trick. It's not easy, but again, do it often, and you'll get proficient at it. It's a rewiring of the synapses, so that your head-happiness shoots down into your face muscles and come out as a smile, as kindness and compassion. Most people don't do this enough to get good at it, and so there is low fidelity.
**Find what feels true.** You'll smile a lot, and most of it will be bullshit. You need bullshit if you want to find the truth. Sort through the bullshit until you recognize the truth, by feel, not by any logical criteria. The truth looks remarkably like bullshit.
**Find clarity.** Good smiling, it's been said often, is inner peace. If your inner peace is muddled, your smile will be. I'd recommend a self-taught course on meditation, but really I've found it's a matter of simplifying. Practice removing external desires and agendas until you have only what's needed to express a simple smile.
**Remove the negativity.** It's a process of subtraction more than addition. Most people end up with too many feelings, because they never subtract. Negative feelings get in the way of your smile, so pare it down, trimming the cynical from the bush until you're left with truth. I subtract in my head, these days, but that's from years of practice. After you react, acknowledge your feelings, and remove the negativity.
Most people also have too many detrimental feelings in their own lives to hear their own optimism. Too much is going on around them, and online, and they have no time for solitude. You can't hear your happiness, your inner peace, without solitude. Remove the negativity in your life as well.
**Use your smile.** You don't embark on a quest for your smile just for the sake of beauty -- a noble pursuit, but it's not enough.
You must use your smile. Use it to express your happiness, to help others, to change the world.
I smile for kindness in a world that's needlessly barbarous.
I smile for inner happiness to stem the tide of seeking happiness through consumerism.
I smile for contentment because too many feel a lacking.
I smile for compassion because my heart breaks at the cruelty of our species.
I smile for morality in an increasingly unethical world, especially in the growing private sector.
This is how I use my smile. How will you use yours?
##### Thanks again to <NAME> for the [original post](http://zenhabits.net/voice/), which I mostly copied.
<!-- more end --><file_sep>/content/pages/about.md
Modified: 2014-06-18T00:00:05.796871
Status: published
Date: 2014-02-09T00:00:00
Slug: about
Title: About Joe
This page needs to be updated. For now please see my [CV]({filename}/pages/cv.md).
<!-- <hr>
I am interested in open data, community driven-development, maps, biking, and a few other things.
Denver > Portland > Santa Fe > Denver
* Spent seven and a half years biking and getting wet in [Portland, OR](http://www.youtube.com/watch?v=3PC5PDlKKIo).
* I managed data visualization projects at [Periscopic](http://periscopic.com) with clients including the Unicef, Yahoo!, GE, the Economic Policy Institute, and the Hewlett Foundation.
* Originally, I moved to Portland to attend [Reed College](http://reed.edu) where I majored in Economics and Math.
* I wrote my [senior thesis](https://s3.amazonaws.com/joehand_blog/Hand_ReedSeniorThesis_2009.pdf) on an economic model fueled by creative destruction, a topic that still interests me today.
I spend my time [reading](https://www.goodreads.com/joehand), cooking, [writing Python](https://github.com/joehand), lifting kettlebells, and walking outside. -->
<hr>
# About This Website<a name="about-site"></a>
This site runs on the [Python](https://www.python.org/)-Powered [Pelican](http://docs.getpelican.com) static site generator.
You can explore the code [on GitHub](https://github.com/joehand/joeahand).
It is served on [GitHub Pages](https://pages.github.com/), via [CloudFare](https://www.cloudflare.com/) ([with easy & free SSL](https://sheharyar.me/blog/free-ssl-for-github-pages-with-custom-domains/)).
**Homepage**:
The homepage data is stored in Google Docs (via [IFTTT](http://ifttt.com)) and visualized with Python ([my Pelican gdocs plugin](https://github.com/joehand/joeahand/tree/master/plugins/pelican_gdocs) and [similar tutorial](http://jlord.us/blog/your-own-instagram.html) for JS version).
Data is processed on my shiny [Digital Ocean server](https://www.digitalocean.com/?refcode=94657bdeab0c).
I use a [cron job](http://crontab.org/) to get new data and regenerate the site daily (at the strike of midnight).
**Charts**:
Coffee chart is made using a `<table>`, CSS, and lots of Joe.
Daily steps chart is made with the [Chartist.js](http://gionkunz.github.io/chartist-js/index.html) library.
**Design**:
I designed the site with a lot of inspiration from others.
Thank you others.
The fonts are [EB Garamond](https://www.google.com/fonts/specimen/EB+Garamond) and [Open Sans](https://www.google.com/fonts/specimen/Open+Sans), served from Google.
I used [Pure CSS](http://purecss.io/layouts/) as my css foundation.
*It is amazing how many people did great work so I could build a website. Pretty cool.*
#### [View it all on GitHub](https://github.com/joehand/joeahand)
<file_sep>/content/my-birthday.md
Modified: 2014-02-09T03:03:43.896000
Status: published
Date: 2012-06-21T00:00:00
Slug: my-birthday
Title: It's my birthday!
Category: note
Kind: note
Today marks a quarter century since I was born. It also marks the first day of summer, though this year was one of the rare years that my birthday and the solstice did not coincide.
I joined some friends for a beautiful picnic last night in Portland's Rose Garden. Great friends, great food, nice weather, flowers, and some good wine. Not much more you need for a good birthday celebration.
I have thought a lot about living a long life. I hope this quarter century marks the first of six in my life. 150 seems like a long life. But I will be sure to enjoy every day of it.
Happy first day of summer!
=)
Also - just for the record - biking home with a bunch of balloons tied to your back is pretty fun. I would recommend it.
<file_sep>/content/strengths.md
Modified: 2014-03-11T17:33:03.216036
Status: published
Date: 2011-07-17T00:00:00
Slug: strengths
Title: Create Strength by Investing in Talent
Category: better human
Kind: article
The book Strength Finder 2.0 develops the idea of maximizing productivity through investment in talents. By understanding your talents you can create strengths while also being more engaged in your work. A greater awareness of my strengths will help me achieve more.
### The Strength Equation
In the book, Strength Finder 2.0, <NAME> introduces the three concepts of **talent, investment,** and **strength**. By *investing* in our *talents* we create *strengths*. He argues that spending time developing our weaknesses gives less return than developing our talents.
Our economy drastically underutilizes our skills because we fail to focus on our strengths. When your manager fails to focus on your strength you are likely to be disengaged at work. By investing in our weaknesses employers increase the chances that we will become bored and inefficient at work. Rather, Rath suggests we need to focus on our talents to become the best we can be.
> You **cannot** be anything you want to be -- but you **can** be a lot more of who you already are.
The key is thinking in terms of talents and strengths, not knowledge and skills. By applying your talents to your interests you can build strengths, regardless of the area of study.
#### Talent and Investment
When we think of the word talent we often think of people who are exceptionally good at something-- <NAME> was a talented basketball player or <NAME> was a talented guitarist. Thinking about your own life, you may be a talented cook or writer. The book defines talent slightly differently from this common conception.
Instead of our common definition, consider talent "a natural way of thinking, feeling, or behaving." While this covers a great deal, I want to focus on the ***natural*** part of the definition.
People relate to and understand the world through different means. These ways to relate, or talents, are innate to who we are -- our natural talents. Rather than forcing everyone to conform to one way of thinking, by strengthening our natural abilities we can achieve more and be happier in the process.
This departure differs dramatically from the usual understanding of education. Education works on conforming everyone through the same methods to reach the same goals. By teaching everyone the same we are dramatically reducing the return on that education while boring people to death. Sounds like a lose-lose for everyone.
By investing in people in an individualized way, we create engaged students. Not only does this make the process of education more fun, but we get better results.
#### Strengths
Strengths require both talent and investment. Anybody can develop strengths in any venue of talent. But by focusing on our top talents we maximize the time we spend honing our skills.
Discovering our strengths also makes it easier for us to work with passion and actively engage in something. So without further ado... here are my results.
##My Strengths
The online assessment takes about thirty minutes and is only available if you buy the book (great marketing). Along with your generic theme descriptions, they give a detailed summary of personal strengths. These are my top five strengths:
* Learner
* Futuristic
* Relator
* Intellection
* Responsibility
The rest of the post I will be trying to understand more about myself, my strengths, and how I can apply them. None of them were super surprising. The Relator one I questioned a bit until reading further into my personal summary; then it made sense. There are three sections of the report: ***awareness, application***, and ***achievement***. I will discuss each theme, then move on to the next section.
###Awareness
####Learner
***Generic Description:***
>People who are especially talented in the Learner theme have a great desire to learn and want to continuously improve. In particular, the process of learning, rather than the outcome, excites them.
Sounds just like me! I love learning. I am a sponge for all information from every source. I always loved school and have continued to learn since leaving school. The personalized description was more interesting.
#####Personal Insights
The whole thing is quite long, so no reason to post here. But I will pick out the things that I am most curious about.
>You appear to study human beings the way others study for final tests. Your approach is direct. Your gaze is intense. Your interest is sincere.
This is great! I am not sure about whether this is how I really approach others. I am usually a pretty shy person, naturally quite introverted. The description talks a lot about how unusual individuals capture my attention and how I become curious about them. I will need to work on developing this part of my learning talents.
>Chances are good that you will fill your mind with new ideas by asking questions, reading, studying, observing or listening.
Ummmm. Talk about generic statements. How else could one fill their mind?
>Because of your strengths, you yearn to know a lot. It makes little sense to you to skim through a book and read only the highlights. You delve more deeply into intriguing subjects than most people do. You love to gather all kinds of information.
Definitely true. One look at my bookshelves reveals this. I have a broad range of topics that have struck my interest. I usually read enough about the topics until I feel like I have a solid understanding of the issues, then move on. Usually I start with the more important and influential books on the subject and move toward a fringe or two that really piques my learning curiosity.
For learner, I need to work on developing my ability to learn from other people the most. The report also mentions my ability to give counsel to others. Something that I enjoy doing, but do not actually get the chance to much right now. So, reaching out more and developing those relationships may help me build that strength.
####Futuristic
***Generic Description:***
>People who are especially talented in the Futuristic theme are inspired by the future and what could be. They inspire others with their visions of the future.
Always thinking about the future, but I need to work on inspiring others with my visions.
#####Personal Insights
>Because of your strengths, you invest considerable time creating the future of your own choosing.
I am hoping to create my own future as we speak. The most important thing I want to do is invest in my strengths.
>Driven by your talents, you feel enthusiastic about life when you contemplate everything you can accomplish in the coming months, years, or decades. You probably need to know what the future holds before you can concentrate on today's activities.
Very true. Sometimes it gets to the point where I get little done if I do not know how this helps the future. This can definitely make it difficult to get some things done. But...
>You think in terms of possibilities. This allows you to recover from setbacks, problems, disappointments, or even disasters.
The upside of looking to the future. There are speed bumps, but they are nothing compared to the long term.
>It's very likely that you sense your life has deep meaning. With remarkable vividness, you often imagine where you will be, what you will be doing, and what you have the ability to accomplish in the coming months, years, or decades. Your dreams of tomorrow are very alive in your mind today... The question you must answer is this: "How far into the future can I think before my ideas start becoming vague or uninspiring?"
Definitely the part of futuristic that I struggle with. I need to balance having a long term vision with the ability to motivate myself to get there. I often abandon projects or areas of study when I get frustrated with the short term and lack the overall vision.
####Relator
***Generic Description:***
>People who are especially talented in the Relator theme enjoy close relationships with others. They find deep satisfaction in working hard with friends to achieve a goal.<file_sep>/content/shapefile-filtering.md
Title: Filtering a Shapefile with Python
Slug: filtering-a-shapefile-with-python
Date: 2015-08-11 19:39:12
Tags: python, spatial, data
Category: programming
Summary:
Recently I've been using Python more to work with shapefiles. Frequently I have to create a shapefile by filtering a set of features of a larger shapefile, for instance I wanted to get a shapefile for blocks in New York City from a shapefile of the whole country. While this isn't very difficult in QGIS (and probably ArcGIS), it gets a bit annoying to do over and over. In the last project, I wanted to create files for over 900 cities in the US - so I definitely wasn't going to do that by hand.
### Quickstart
I'll walk through the three main steps to do this in Python:
1. Creating a shapefile when `my_field = my_value` from a large shapefile.
2. Finding all values in `my_field` from your large shapefile.
3. Putting 1 & 2 together to create many shapefiles for every value in `my_field`.
*** If you want to see how it all comes together, [see the full code here](https://gist.github.com/joehand/498a1656e028c6163aa9).***
There are a few Python-based shapefile libraries. The most extensive is [GDAL](http://www.gdal.org/) with [Python bindings](https://pypi.python.org/pypi/GDAL/). I decided to use GDAL because of the speed (some of the other libraries are pure Python, thus slower). The only downside is that it can be a pain to install (especially in a virtual environment).
### Command Line Filtering
If you want to filter a shapefile by a specific field/value OGR has a attribute filter function. If you just need to do this once, I recommend using the command line:
:::bash
ogr2ogr -f "ESRI Shapefile" -where \
"my_field = some_value" new_shapefile.shp source_shapefile.shp
But I needed to filter many times and in the context of another program, so let's look at the Python version of this command.
## Creating a Single Shapefile for a Known Filter Value
In this case, we know our field and know the value we want to filter. We just need to create a single shapefile from the larger one. First, we will open our larger input file and use OGR's filter function to get the filtered features. Then we can create a new shapefile by copying the filtered input file:
:::python
from osgeo import ogr
def create_filtered_shapefile(filter_field, value, in_shapefile):
    """Create a new shapefile containing only features where
    ``filter_field`` equals ``value``.

    Returns the path of the shapefile that was created.
    """
    input_layer = ogr.Open(in_shapefile).GetLayer()
    # Name the output after the value so repeated calls (one per value)
    # don't all overwrite a single hard-coded 'out_shapefile.shp'.
    out_shapefile = '{}.shp'.format(value)
    # Filter by our query. In the OGR SQL dialect, identifiers (field
    # names) are double-quoted and string literals single-quoted; the
    # original double-quoted the value, which OGR parses as a column name.
    query_str = '"{}" = \'{}\''.format(filter_field, value)
    input_layer.SetAttributeFilter(query_str)
    # Copy the filtered layer into a new ESRI Shapefile data source.
    driver = ogr.GetDriverByName('ESRI Shapefile')
    out_ds = driver.CreateDataSource(out_shapefile)
    out_layer = out_ds.CopyLayer(input_layer, str(value))
    # Deleting the OGR objects flushes and closes the files on disk.
    del input_layer, out_layer, out_ds
    return out_shapefile
Great! We have a filtered shapefile. But what if we want to do this for every value in some field?
## Getting All Values for a Field
If we want to create a shapefile for each value in some field, first we need to figure out all the values for `my_field`. Luckily, OGR allows you to execute SQL queries on the shapefiles. So we can get our values with some simple SQL: `SELECT DISTINCT 'my_field' FROM 'my_shapefile'`. And putting it in a function:
:::python
def get_unique_values(filter_field, in_shapefile):
    """Return the unique values of ``filter_field`` found in the
    source shapefile, using an OGR SQL ``SELECT DISTINCT`` query.
    """
    sql = 'SELECT DISTINCT "{}" FROM {}'.format(
        filter_field, in_shapefile)
    result_layer = ogr.Open(in_shapefile).ExecuteSQL(sql)
    # OGR gives no bulk accessor here: the result layer has to be
    # walked feature-by-feature to read each value out.
    return [feature.GetField(0) for feature in result_layer]
Finally, we will put the last two functions together with a loop. First, we get all the possible values from some field and then loop over the values and create a shapefile for each one. This can be pretty slow depending on how many shapefiles you need to create.
:::python
def create_all_shapefiles(filter_field, in_shapefile):
    """Create one shapefile per unique value of ``filter_field``.

    Returns the list of shapefile paths (existing files are kept
    untouched rather than overwritten).
    """
    out_files = []
    values = get_unique_values(filter_field, in_shapefile)
    for val in values:
        out_file = '{}.shp'.format(val)
        # Don't overwrite existing files.
        if not os.path.isfile(out_file):
            # BUG FIX: the original called create_filtered_shapefile(val),
            # but that function takes (filter_field, value, in_shapefile)
            # and would raise a TypeError on every call.
            out_file = create_filtered_shapefile(filter_field, val, in_shapefile)
        out_files.append(out_file)
    return out_files
That's it! We now have a shapefile for each value in `my_field`. I've written this up as a full Python `class` you can plug into your own projects. ***[Check that code out here](https://gist.github.com/joehand/498a1656e028c6163aa9).***
Let me know if you have questions or found this useful! You can [find me on Twitter](http://twitter.com/joeahand).<file_sep>/content/pages/home.md
Status: published
Date: 2015-07-29 17:32:30
Title: home
Slug: home
I am a developer, researcher, and open data enthusiast.
I work with international communities, open source developers, scientists, and data visualization & mapping experts to enable communities to solve problems.
Over the last several years with the [Santa Fe Institute](http://santafe.edu/research/informal-settlements/), I organized an effort to transform the data collection practices of [Slum/Shack Dwellers International](http://www.sdinet.org/).
Now, slum communities worldwide are collecting census data about themselves to help drive development and inform global knowledge through research.
<div class="pure-g">
<div class="small-caps home-link pure-u-1-3 pure-u-sm-1-5"><a href="mailto:joe@joeahand.com">Email</a></div>
<div class="small-caps home-link pure-u-1-3 pure-u-sm-1-5"><a href="http://twitter.com/joeahand">Twitter</a></div>
<div class="small-caps home-link pure-u-1-3 pure-u-sm-1-5"><a href="https://instagram.com/joeahand/">Instagram</a></div>
<div class="small-caps home-link pure-u-1-2 pure-u-sm-1-5"><a href="http://github.com/joehand">GitHub</a></div>
<div class="small-caps home-link pure-u-1-2 pure-u-sm-1-5"><a href="https://www.linkedin.com/in/joeahand">LinkedIn</a></div>
</div><file_sep>/content/finding-passion.md
Modified: 2014-03-11T17:35:41.471132
Status: published
Date: 2011-05-02T00:00:00
Slug: finding-passion
Title: Finding passion
Category: note
Kind: note
Both <NAME> and <NAME> preach similar sentiments. Each encourages us to start doing something we love and share that gift with others. By doing something we love we become more energized by doing that thing. That allows us to share even more with the world. At the same time, when we do something we love, the opportunities change in our lives. By living through our passion we can reshape our lives to become something better.
'Be the change you want to see in the world' -Gandhi
Just like an angry person cannot bring world peace, you must follow your passion to live a fulfilling life. The difficulty comes in finding what that passion is.
Where have I found myself to be passionate? When have I done something where I lose all track of time? What activities give me energy?
These are all very interesting questions that are difficult to answer. I can think back about what I have done in the past that fueled me, but what if that has changed. I can think about what I am excited about in the future, but is that enough to drive my life forwards.
I am passionate about happiness in a way that others do not seem to be. I am passionate about being myself, caring, kindness. I strive to be the best person I can be, and I enjoy spending time understanding how I can do that.
I love new ideas. I love seeing new ideas change others even more. But how can that help me find my tribe?
Maybe I should have been an engineer or architect. What if that is what I was meant to do? How would my life path look different had I pursued that passion?
I want to write more today. Yesterday I proposed a post about the overarching goal of Seamless. Perhaps that is too grand of a topic. I am distracted. The resistance is pulling me away. Should I give in? Should I keep writing?
Maybe my passion is just as the quote from Gandhi above. Perhaps I can find my passion by being the change I want to see in the world.
Template: cv
Title: CV
slug: cv
<div class="cv-mobile">This page doesn't quite work on mobile. Do you want to:
<ul>
<li><a href="https://dl.dropboxusercontent.com/u/34000599/JoeHand_CV.pdf">Download PDF</a></li>
<li><a href="https://docs.google.com/document/d/<KEY>-wH_1M/pub?embedded=true">View Online</a> (uses less data)</li>
</ul>
</div>
<iframe id="cv-frame" class="cv-frame"
width="100%" height="3500px" scrolling="no"
src="//docs.google.com/document/d/<KEY>/pub?embedded=true">
</iframe>
|
e7910b1a6c9fbb84723947e4b33874b38aceddea
|
[
"Markdown",
"Python",
"HTML"
] | 26 |
Markdown
|
scshepard/joeahand
|
13506f3e151e419d843992cfb17d7ef21ff42886
|
effa5653637c684c91c0219a06a6b2923aaf9e8c
|
refs/heads/master
|
<file_sep>[tox]
env_list = src
# Fixed typo: the option is "skipsdist" (skip building an sdist of the
# project); "skipdist" is not a recognized tox key and was ignored.
skipsdist = True

[testenv]
install_command = pip install {opts} {packages}
deps =
    -rrequirements.txt
setenv =
    # Make the repository root importable so tests can import src.*
    PYTHONPATH=.
commands =
    pytest tests/
<file_sep>from src.example_package import square
def test():
    """square(num=2) should return 4."""
    result = square(num=2)
    assert result == 4
<file_sep>from numbers import Number
def square(*, num: Number) -> Number:
    """Return ``num`` squared. ``num`` is keyword-only."""
    return num * num
<file_sep># example-package
Example package repository for python setup.py installations
|
ffa92693cde2d5664e4e9103f365f1fbaf511329
|
[
"Markdown",
"Python",
"INI"
] | 4 |
INI
|
rahuliitb/example-package
|
3fa4acf87788b0210ad0d820b03eee147b375803
|
7777242af1052015ecd32a3a7f7cc8b407f61a20
|
refs/heads/master
|
<repo_name>mobileboxlab/mobilebox-cloud-tutorial<file_sep>/src/test/java/com/mobilebox/appium/test/AppTests.java
package com.mobilebox.appium.test;
import org.testng.Assert;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.mobilebox.appium.screens.AddClock;
import com.mobilebox.appium.screens.HomeScreen;
public class AppTests extends BaseTest {
HomeScreen home;
AddClock addClock;
@Test
public void addOneCity() throws InterruptedException {
String city = "Honolulu";
home = getHomeScreen();
addClock = home.coco();
addClock.tapByCityName(city);
addClock.tapBtnOk();
Assert.assertTrue(home.isCityInList(city));
}
@Test
public void addMultipleCitiesAtOnce() throws InterruptedException {
String city1 = "Tijuana";
String city2 = "Honolulu";
String city3 = "Los Angeles";
home = getHomeScreen();
addClock = home.coco();
addClock.tapByCityName(city1);
addClock.tapByCityName(city2);
addClock.tapByCityName(city3);
addClock.tapBtnOk();
Assert.assertTrue(home.isCityInList(city1));
Assert.assertTrue(home.isCityInList(city2));
Assert.assertTrue(home.isCityInList(city3));
}
@Test
public void addOneCityAndCancel() throws InterruptedException {
String city = "Tijuana";
home = getHomeScreen();
addClock = home.coco();
addClock.tapByCityName(city);
addClock.tapBtnCancel();
Assert.assertTrue(!home.isCityInList(city));
}
}<file_sep>/src/main/java/com/mobilebox/appium/driver/Driver.java
package com.mobilebox.appium.driver;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.openqa.selenium.Platform.ANDROID;
import java.net.MalformedURLException;
import java.net.URL;
import org.openqa.selenium.remote.DesiredCapabilities;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.android.AndroidElement;
import io.appium.java_client.remote.MobileCapabilityType;
/**
 * Factory for remote {@link AndroidDriver} instances pointed at the
 * Mobilebox/Appium hub running on localhost.
 */
public class Driver {

  // Local Appium/Mobilebox hub endpoint.
  private final String MB_HUB = "http://127.0.0.1:4723/wd/hub/";

  // Implicit-wait timeout applied to every element lookup, in seconds.
  private final int TIMEOUT = 20;

  /**
   * Create an <code>AndroidDriver</code> instance.
   *
   * @param device The device name.
   * @param version The Android OS version. E.G: 6.0.0
   * @param path The remote http URL to an .apk file. E.G: http://www.test.com/myapp.apk
   * @return An <code>AndroidDriver</code>.
   * @throws MalformedURLException If the Mobilebox hub URL is malformed.
   */
  public AndroidDriver<AndroidElement> getAndroidDriver(final String device, final String version,
      final String path) throws MalformedURLException {
    URL hub = new URL(MB_HUB);
    DesiredCapabilities capabilities = new DesiredCapabilities();
    capabilities.setCapability(MobileCapabilityType.PLATFORM_NAME, ANDROID);
    capabilities.setCapability(MobileCapabilityType.PLATFORM_VERSION, version);
    capabilities.setCapability(MobileCapabilityType.DEVICE_NAME, device);
    // NOTE(review): BROWSER_NAME is set to the device name here — looks
    // intentional for this hub but worth confirming against Mobilebox docs.
    capabilities.setCapability(MobileCapabilityType.BROWSER_NAME, device);
    capabilities.setCapability(MobileCapabilityType.APP, path);
    // Launch activity/package of the app under test (Simple Mobile Tools Clock).
    capabilities.setCapability("appActivity", "com.simplemobiletools.clock.activities.SplashActivity");
    capabilities.setCapability("appPackage", "com.simplemobiletools.clock");
    // NOTE(review): credential committed in source — consider loading the
    // token from an environment variable or config file instead.
    capabilities.setCapability("mobileboxToken", "38dc242cc11b4bd78def222ed8ff8942bc81e5a4bf634f1cb32ef6a38911c021");
    AndroidDriver<AndroidElement> driver = new AndroidDriver<AndroidElement>(hub, capabilities);
    // Implicit wait: element lookups poll up to TIMEOUT seconds before failing.
    driver.manage().timeouts().implicitlyWait(TIMEOUT, SECONDS);
    return driver;
  }
}
<file_sep>/src/test/java/com/mobilebox/appium/screens/AddClock.java
package com.mobilebox.appium.screens;
import io.appium.java_client.MobileElement;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.android.AndroidElement;
import io.appium.java_client.pagefactory.AndroidFindBy;
/**
 * Screen object for the "add clock" dialog: pick one or more cities,
 * then confirm (OK) or dismiss (Cancel).
 */
public class AddClock extends AbstractScreen {

  // Standard Android dialog positive button.
  @AndroidFindBy(id = "android:id/button1")
  private MobileElement btnOK;

  // Standard Android dialog negative button.
  @AndroidFindBy(id = "android:id/button2")
  private MobileElement btnCancel;

  public AddClock(AndroidDriver<AndroidElement> driver) {
    super(driver);
  }

  /** Taps the list entry whose visible text matches the given city name. */
  public void tapByCityName(String city) {
    tap(findByString(city));
  }

  /** Confirms the dialog, adding the selected cities. */
  public void tapBtnOk() {
    tap(btnOK);
  }

  /** Dismisses the dialog without adding anything. */
  public void tapBtnCancel() {
    tap(btnCancel);
  }
}
### What is this repository for? ###
This is a simple example of an **Appium + Maven + TestNG** project in **Mobilebox Cloud**.
Mobilebox offers access to a very large and diverse set of Android and iOS devices.
___

___
### Running automated tests
In this **"how to"** we will explain you how to run a new or existing automated test cases suite on our device farm in the cloud. For this tutorial, we have created a very simple demo project that test a native Android app with some automated functional tests created in **Appium** and **TestNG**.
To understand how to run your suite on [Mobilebox](http://mobileboxlab.com), we recommend following this [tutorial](http://documentation.mobileboxlab.com) using the demo app, and then replicating the same steps on your existing test suite.
<file_sep>/src/main/java/com/mobilebox/appium/test/BaseTest.java
package com.mobilebox.appium.test;
import java.net.MalformedURLException;

import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Parameters;

import com.mobilebox.appium.driver.Driver;
import com.mobilebox.appium.screens.HomeScreen;

import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.android.AndroidElement;
import io.appium.java_client.android.AndroidElement;
/**
 * Base class for Appium tests: creates a fresh driver/home screen before
 * every test method and quits it afterwards.
 */
public abstract class BaseTest {

  // Screen object wrapping the driver created for the current test method.
  private HomeScreen homeScreen;

  /**
   * Creates a new remote driver and home screen before each test method,
   * using the device/version/app parameters from the TestNG suite XML.
   */
  @BeforeMethod(alwaysRun = true)
  @Parameters({"device", "version", "appURL"})
  public void setup(String device, String version, String appURl) throws MalformedURLException {
    AndroidDriver<AndroidElement> driver = new Driver().getAndroidDriver(device, version, appURl);
    homeScreen = new HomeScreen(driver);
  }

  /** Returns the home screen created for the current test method. */
  public HomeScreen getHomeScreen() {
    return homeScreen;
  }

  /**
   * Quits the driver after every test method.
   *
   * BUG FIX: this was annotated @AfterTest, which TestNG runs only once per
   * &lt;test&gt; tag — but setup() runs per method, so every test method
   * beyond the first leaked a driver session. @AfterMethod matches the
   * per-method setup lifecycle.
   */
  @AfterMethod(alwaysRun = true)
  public void quit() {
    if (homeScreen != null)
      homeScreen.quit();
  }
}
|
7dba83e4fcd302e9d368a6cb4ff80b8817890a79
|
[
"Markdown",
"Java"
] | 5 |
Java
|
mobileboxlab/mobilebox-cloud-tutorial
|
cfc27de058159c3a937071910e50c477431391d0
|
4655a7e30f99e55a7f57c11b6dc49327b6335791
|
refs/heads/master
|
<file_sep>import { Observable, Subscription } from 'rxjs';
import { HomeComponent } from './components/home/home.component';
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { ObservableComponent } from './components/observable/observable.component';
// Application routes: the empty path renders the home page and
// '/observable' renders the RxJS observable demo component.
const routes: Routes = [
{
path: '',
component: HomeComponent
},
{
path: 'observable',
component: ObservableComponent
}
];

// Root routing module. useHash enables hash-based URLs (e.g. /#/observable);
// NOTE(review): presumably chosen so routing works without server-side URL
// rewriting (e.g. when served from file:// in Electron) — confirm.
@NgModule({
imports: [RouterModule.forRoot(routes, {useHash: true})],
exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { Observable, Subscription } from 'rxjs';
@Component({
  selector: 'app-observable',
  templateUrl: './observable.component.html',
  styleUrls: ['./observable.component.scss']
})
export class ObservableComponent implements OnInit {

  // Combined yards across both teams and both play types.
  combinedTotal: number = 0;

  private pass: Observable<any>;
  private run: Observable<any>;

  // Per-team stats; the index is the team number emitted by playLoop.
  teams = [];

  constructor() { }

  ngOnInit() {
    this.teams.push({passing: 0, running: 0, total: 0});
    this.teams.push({passing: 0, running: 0, total: 0});

    // NOTE: these are cold observables — each subscribe() call below starts
    // its own independent playLoop timer chain.

    // Passing
    this.pass = new Observable(observer => {
      this.playLoop(observer);
    });

    this.pass.subscribe(
      data => {
        this.teams[data.team].passing += data.yards;
        this.addTotal(data.team, data.yards);
      }
    );

    // Running
    this.run = new Observable(observer => {
      this.playLoop(observer);
    });

    this.run.subscribe(
      data => {
        this.teams[data.team].running += data.yards;
        this.addTotal(data.team, data.yards);
      }
    );

    // Combined
    this.pass.subscribe(
      data => {
        this.combinedTotal += data.yards;
      }
    );

    this.run.subscribe(
      data => {
        // BUG FIX: this was `this.combinedTotal +- data.yards;`, which
        // parses as the discarded expression `total + (-yards)` — a no-op —
        // so yards from this subscription never reached combinedTotal.
        this.combinedTotal += data.yards;
      }
    );
  }

  // Emits a random {team, yards} play after a random delay, then recurses
  // until combinedTotal reaches 1000.
  playLoop(observer) {
    const time = this.getRandom(500, 2000);
    setTimeout(() => {
      observer.next({
        team: this.getRandom(0, 2),
        yards: this.getRandom(0, 30)
      });
      if (this.combinedTotal < 1000) {
        this.playLoop(observer);
      }
    }, time);
  }

  // Accumulates yards into the given team's running total.
  addTotal(team, yards) {
    this.teams[team].total += yards;
  }

  // Random integer in [min, max).
  getRandom(min, max) {
    return Math.floor(Math.random() * (max - min)) + min;
  }
}
|
6f12c6e14d6a99e1bc38a033ccd8fc9c7b3d90c2
|
[
"TypeScript"
] | 2 |
TypeScript
|
silviolimeira/angular-electron
|
6712d1abc69e90981fde7fc1eb5e986c4bf74cf6
|
7a4ad9e6f55815f51640d15550234374da71d760
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.