branch_name
stringclasses 149
values | text
stringlengths 23
89.3M
| directory_id
stringlengths 40
40
| languages
listlengths 1
19
| num_files
int64 1
11.8k
| repo_language
stringclasses 38
values | repo_name
stringlengths 6
114
| revision_id
stringlengths 40
40
| snapshot_id
stringlengths 40
40
|
---|---|---|---|---|---|---|---|---|
refs/heads/master
|
<repo_name>FriXeee/CSGO-Legit-Hack<file_sep>/README.md
# CSGO-Legit-Hack
This is a very old source; it might still be undetected.
If you need anything from it, you can simply use it — you have my approval.
Most of the offsets are outdated, so the source needs to be updated.
The cheat was coded using VS 2013; you can still use VS 2019 and convert the project.
<file_sep>/AnkFEST/H_Math.h
#include "H_Include.h"
#define M_PI 3.14159265358979323846
// Minimal 3-component float vector used for positions throughout this source.
// Plain aggregate: members are intentionally left uninitialized so the layout
// stays POD-compatible with raw ReadProcessMemory fills.
class Vector
{
public:
float x; // first component
float y; // second component
float z; // third component
};
// Small math helper used by the aimbot: arctangent, vector length,
// angle <-> direction conversions and angle wrapping.
class C_Math
{
public:
	// Quadrant-aware arctangent. NOTE: argument order is (x, y) but the math
	// treats it like the CRT's atan2 with the operands swapped -- kept exactly
	// as existing call sites expect it. Result is in radians.
	float ATAN2(float x, float y)
	{
		if (y < 0){
			// Fold into the y >= 0 half-plane, then mirror the result.
			return -ATAN2(x, -y);
		}
		else if (x < 0){
			return M_PI - atan(-y / x);
		}
		else if (x > 0){
			return atan(y / x);
		}
		else if (y != 0){
			return M_PI / 2;
		}
		// FIX: x == 0 && y == 0 previously fell off the end of the function
		// (undefined behavior). The angle is mathematically undefined here;
		// return 0 as a safe, conventional value.
		return 0.0f;
	}
	// Euclidean length of a 3-float array.
	float VectorLength(float *v)
	{
		return (float)sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
	}
	// Convert Euler angles in degrees (pitch, yaw, roll) into forward/right/up
	// basis vectors. Any of the three output pointers may be NULL.
	void AngleVectors(float *angles, float *forward, float *right, float *up)
	{
		// FIX: the trig temporaries were function-local statics, which made
		// this routine unsafe to call from more than one thread at once.
		// They are always written before being read, so plain locals are
		// behaviorally identical for each single call.
		float angle;
		float sp, sy, cp, cy;
		angle = angles[0] * (M_PI / 180);
		sp = sin(angle);
		cp = cos(angle);
		angle = angles[1] * (M_PI / 180);
		sy = sin(angle);
		cy = cos(angle);
		if (forward)
		{
			forward[0] = cp*cy;
			forward[1] = cp*sy;
			forward[2] = -sp;
		}
		if (right || up)
		{
			// Roll is only needed for the right/up vectors.
			float sr, cr;
			angle = angles[2] * (M_PI / 180);
			sr = sin(angle);
			cr = cos(angle);
			if (right)
			{
				right[0] = -1 * sr*sp*cy + -1 * cr*-sy;
				right[1] = -1 * sr*sp*sy + -1 * cr*cy;
				right[2] = -1 * sr*cp;
			}
			if (up)
			{
				up[0] = cr*sp*cy + -sr*-sy;
				up[1] = cr*sp*sy + -sr*cy;
				up[2] = cr*cp;
			}
		}
	}
	// Normalize vec in place and return its original length. FLT_EPSILON in
	// the divisor guards against division by zero for a zero-length vector.
	float VectorNormalize(Vector& vec)
	{
		float radius = sqrtf(vec.x*vec.x + vec.y*vec.y + vec.z*vec.z);
		float iradius = 1.f / (radius + FLT_EPSILON);
		vec.x *= iradius;
		vec.y *= iradius;
		vec.z *= iradius;
		return radius;
	}
	// Direction vector -> Euler angles in degrees; angles[0] = pitch,
	// angles[1] = yaw (both remapped into [0, 360)), angles[2] = roll = 0.
	void VectorAngles(const float *forward, float *angles)
	{
		float tmp, yaw, pitch;
		if (forward[1] == 0 && forward[0] == 0)
		{
			// Looking straight up or down: yaw is arbitrary.
			yaw = 0;
			if (forward[2] > 0)
				pitch = 270;
			else
				pitch = 90;
		}
		else
		{
			yaw = (atan2(forward[1], forward[0]) * 180 / M_PI);
			if (yaw < 0)
				yaw += 360;
			tmp = sqrt(forward[0] * forward[0] + forward[1] * forward[1]);
			pitch = (atan2(-forward[2], tmp) * 180 / M_PI);
			if (pitch < 0)
				pitch += 360;
		}
		angles[0] = pitch;
		angles[1] = yaw;
		angles[2] = 0;
	}
	// Wrap every component of a 3-float angle array into [-180, 180].
	void NormalizeAngles(float *angle)
	{
		for (int axis = 2; axis >= 0; --axis)
		{
			while (angle[axis] > 180.f)
			{
				angle[axis] -= 360.f;
			}
			while (angle[axis] < -180.f)
			{
				angle[axis] += 360.f;
			}
		}
	}
}; extern C_Math g_Math;<file_sep>/AnkFEST/H_Globals.h
#include "H_Include.h"
// Grab-bag of global, cross-thread state: process handles, module bases,
// per-scan aimbot scratch values and the user's settings.
// NOTE(review): fields are written and read from several threads with no
// synchronization -- pre-existing design, kept as-is.
class C_GlobalVars
{
public:
PBYTE dwBoneBase; // cached bone matrix base (usage not visible in this chunk)
bool bBehind;
int Window_x; // game window rectangle (filled in elsewhere)
int Window_y;
int Window_w;
int Window_h;
int Screen_Width;
int Screen_Height;
PBYTE dwEngine; // engine.dll base address inside the target process
PBYTE dwClient; // client.dll base address inside the target process
HANDLE hProcess; // handle used for every Read/WriteProcessMemory call
HANDLE hSnapshot;
DWORD hPID;
HWND HandleWindow; // game window; the triggerbot posts mouse messages to it
float vAimVectorGeneral[3]; // world position of the current aim target
float Delta[3];
float Diff[3];
float GeneralAimbot[3]; // local eye -> target delta, used for distance ranking
float m_vecVelocity[3];
float angle1[3];
float angle2[3];
float angle3[3];
float angle4[3];
float angle5[3];
float youpos[3];
float Angle[3]; // view angles the aimbot writes into the engine
float Aimmin[3]; // angular delta to target, input to the FOV check
float TargetSelection;
float TargetSelection2;
float fNearestTarget; // best (smallest) target metric seen during a scan
float Distance;
float fovdist;
int iNearestTarget; // entity index of the chosen target, -1 if none
int m_iWeaponID; // active weapon's ID, read from the weapon entity
std::string pfad;
char ini[260]; // full path of Config.cfg (MAX_PATH-sized buffer)
// Aimbot settings, loaded per-weapon from Config.cfg.
struct Aimbot
{
int Enabled;
int Key; // virtual-key code that activates the aimbot
int FOV; // aim cone; compared against fovdist / 2
float Smooth; // > 0 limits per-tick angle change (2 / Smooth degrees)
int Bone; // bone matrix row index to aim at
int RCS; // 1 = fold the doubled aim punch into the aim angles
}Aimbot;
// Triggerbot settings, loaded per-weapon from Config.cfg.
struct Triggerbot
{
int Enabled;
int Key;
int Sleep1; // ms the left mouse button is held down
int Sleep2; // ms to wait after releasing the button
}Triggerbot;
struct Radar
{
int Enabled;
int Distance;
}Radar;
struct GESP
{
int Enabled;
}GESP;
int PanicKey;
int EndKey; // loaded from "aimbot.end.key" in Config.cfg
int ReloadKey; // loaded from "cfg.reload.key" in Config.cfg
}; extern C_GlobalVars g_Vars;<file_sep>/AnkFEST/H_Module.h
#include "H_Include.h"
class C_Module
{
public:
PBYTE GetModule(HANDLE Snapshot, string Module)
{
PBYTE ModuleAddy;
string Compare;
MODULEENTRY32 ME32;
if (Snapshot == INVALID_HANDLE_VALUE)
return (PBYTE)ERROR_INVALID_HANDLE;
else
{
ME32.dwSize = sizeof(MODULEENTRY32);
if (!Module32First(Snapshot, &ME32))
return (PBYTE)GetLastError();
}
while (Compare != Module)
{
if (!Module32Next(Snapshot, &ME32))
return (PBYTE)GetLastError();
else
Compare = string(ME32.szModule);
}
ModuleAddy = ME32.modBaseAddr;
return ModuleAddy;
}
int GetProcID(string ProcName)
{
PROCESSENTRY32 PE32;
HANDLE ProcSnapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
if (ProcSnapshot == INVALID_HANDLE_VALUE)
return 0;
PE32.dwSize = sizeof(PROCESSENTRY32);
if (!Process32First(ProcSnapshot, &PE32))
{
CloseHandle(ProcSnapshot);
return 0;
}
else
if (PE32.szExeFile == ProcName)
{
CloseHandle(ProcSnapshot);
return PE32.th32ProcessID;
}
else
{
do
{
if (PE32.szExeFile == ProcName)
{
CloseHandle(ProcSnapshot);
return PE32.th32ProcessID;
}
} while (Process32Next(ProcSnapshot, &PE32));
CloseHandle(ProcSnapshot);
return 0;
}
}
bool SetDebugPrivilege()
{
HANDLE hProcess = GetCurrentProcess(), hToken;
TOKEN_PRIVILEGES priv;
LUID luid;
OpenProcessToken(hProcess, TOKEN_ADJUST_PRIVILEGES, &hToken);
LookupPrivilegeValue(0, "seDebugPrivilege", &luid);
priv.PrivilegeCount = 1;
priv.Privileges[0].Luid = luid;
priv.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED;
AdjustTokenPrivileges(hToken, false, &priv, 0, 0, 0);
CloseHandle(hToken);
CloseHandle(hProcess);
return true;
}
float GetPrivateProfileFloat(const char* section, const char* key, float def_value, const char* filename)
{
char buffer[64];
if (::GetPrivateProfileString(section, key, "", buffer, sizeof(buffer), filename))
return static_cast<float>(atof(buffer));
return static_cast<float>(def_value);
}
}; extern C_Module g_Module;<file_sep>/AnkFEST/H_Offsets.h
// Hard-coded memory offsets for the targeted CS:GO build. These go stale
// with every game update -- refresh them from a current dump before use.
// Comments below describe how each offset is USED in this source; the
// exact netvar names are presumed and should be verified against a dump.
#define c_dwLocalBaseEntity 0xD28B1C // client.dll: local player entity pointer
#define c_dwBaseEntity 0x4D3C68C // client.dll: entity list base (0x10 stride per slot)
#define ViewAng 0x588D9C // engine.dll: client state pointer
#define ViewAngOff 0x4D88 // client state: view angles (read and written)
#define hitboxpos 0x138 // entity: position used as the local aim source
#define MyTeamAD 0xF4 // entity: team number
#define Recoil_Offset 0x70 // added to m_local: aim punch angles
#define m_local 0x2FBC
#define Healths 0x100 // entity: health
#define BoneOffset 0x26A8 // entity: bone matrix pointer
#define c_dwEnginePosition 0x588D9C // engine.dll: read as local position (same base as ViewAng)
#define c_dwWeaponID 0x2FAA // weapon entity: weapon ID
#define c_dwWeaponHandle 0x2EF8 // entity: active weapon handle (low 12 bits = index)
#define OFFSET_CROSSHAIRID 0xB3D4 // entity: index of the entity under the crosshair
<file_sep>/AnkFEST/AnkFEST.cpp
#include "H_Include.h"
#define MPI 3.14159265358979323846 // pi; duplicates M_PI from H_Math.h
#define DegToRad MPI / 180 // NOTE(review): unparenthesized macro -- risky inside larger expressions
// Single definitions backing the extern declarations in the headers.
C_GlobalVars g_Vars;
C_Module g_Module;
C_Math g_Math;
C_Nospread g_Nospread;
int UPD = 100;
int UPDSET = 50;
bool GeneralPanic = false;
EXTERN_C IMAGE_DOS_HEADER __ImageBase; // linker-provided symbol: this module's own base address
HWND m_hWnd;
bool InitializeSettings()
{
GetModuleFileName((HINSTANCE)&__ImageBase, g_Vars.ini, _countof(g_Vars.ini));
for (int i = 0; i < (int)strlen(g_Vars.ini); i++)
{
if (g_Vars.ini[strlen(g_Vars.ini) - i] == '\\')
{
g_Vars.ini[(strlen(g_Vars.ini) - i) + 1] = '\0';
strcat(g_Vars.ini, ".\\Config.cfg");
break;
}
}
g_Vars.Aimbot.Key = g_Module.GetPrivateProfileFloat("Horcrux", "aimbot.key", 0, g_Vars.ini);
g_Vars.EndKey = g_Module.GetPrivateProfileFloat("Horcrux", "aimbot.end.key", 0, g_Vars.ini);
g_Vars.Triggerbot.Key = g_Module.GetPrivateProfileFloat("Horcrux", "triggerbot.key", 0, g_Vars.ini);
g_Vars.ReloadKey = g_Module.GetPrivateProfileFloat("Horcrux", "cfg.reload.key", 0, g_Vars.ini);
g_Vars.Aimbot.Enabled = 1;
g_Vars.Aimbot.FOV = 0;
g_Vars.Aimbot.Smooth = 0;
g_Vars.Aimbot.Bone = 0;
g_Vars.Aimbot.RCS = 0;
g_Vars.Triggerbot.Enabled = 1;
return true;
}
// Snapshot of the LOCAL player's state, re-read from the target process on
// every aimbot tick via ReadProcessMemory.
typedef struct C_LocalBase
{
PBYTE LocalBase; // local player entity pointer (client + c_dwLocalBaseEntity)
PBYTE AngBase; // engine client state pointer (engine + ViewAng)
PBYTE CWeaponBase; // entity pointer of the currently held weapon
float flAngle[3]; // current view angles (AngBase + ViewAngOff)
float i1[3]; // local aim-source position (entity + hitboxpos)
float MeFov[3]; // view angles again -- read from the SAME address as flAngle
float recoil[3]; // aim punch; x/y pre-multiplied by 2 in ReadStructure
float vPosition[3]; // position read from engine + c_dwEnginePosition
int MyTeam; // team number (entity + MyTeamAD)
int m_WeaponHandle; // raw weapon handle; low 12 bits are the entity index
int m_WeaponIDFirst; // entity index extracted from m_WeaponHandle
// Reset the cached pointers so a failed read is not mistaken for live data.
void Clear()
{
this->LocalBase = 0x0;
this->AngBase = 0x0;
this->CWeaponBase = 0x0;
}
// Pull everything from the target process in one pass.
// NOTE(review): every ReadProcessMemory return value is discarded, so a
// failed read leaves the previous/zeroed contents in place -- pre-existing.
void ReadStructure()
{
this->Clear();
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwClient + c_dwLocalBaseEntity, &LocalBase, sizeof(LocalBase), NULL);
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwEngine + ViewAng, &AngBase, sizeof(AngBase), NULL);
ReadProcessMemory(g_Vars.hProcess, AngBase + ViewAngOff, &flAngle, sizeof(flAngle), NULL);
ReadProcessMemory(g_Vars.hProcess, LocalBase + hitboxpos, &i1, sizeof(i1), NULL);
ReadProcessMemory(g_Vars.hProcess, LocalBase + MyTeamAD, &MyTeam, sizeof(MyTeam), NULL);
ReadProcessMemory(g_Vars.hProcess, AngBase + ViewAngOff, &MeFov, sizeof(MeFov), NULL);
ReadProcessMemory(g_Vars.hProcess, LocalBase + Recoil_Offset + m_local, &recoil, sizeof(recoil), NULL);
// Doubling the punch angles -- presumably scales the stored value to the
// full on-screen kick; TODO confirm against the game's recoil handling.
recoil[0] *= 2;
recoil[1] *= 2;
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwEngine + c_dwEnginePosition, &vPosition, sizeof(vPosition), 0);
ReadProcessMemory(g_Vars.hProcess, LocalBase + c_dwWeaponHandle, &m_WeaponHandle, 4, 0);
// Entity handles keep the entity index in their low 12 bits.
m_WeaponIDFirst = m_WeaponHandle & 0xFFF;
// Entity list stride is 0x10 and the handle index is 1-based, hence -0x10.
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwClient + c_dwBaseEntity + ((0x10 * m_WeaponIDFirst) - 0x10), &CWeaponBase, sizeof(CWeaponBase), 0);
ReadProcessMemory(g_Vars.hProcess, CWeaponBase + c_dwWeaponID, &g_Vars.m_iWeaponID, 4, NULL);
//cout << g_Vars.m_iWeaponID << endl;
}
} LocalBasePlayer;
// Snapshot of ONE entity slot, re-read per aimbot tick.
// NOTE: the trailing "} BasePlayer[64];" also declares a global 64-element
// array named BasePlayer (the aimbot actually uses its own local array).
typedef struct C_BasePlayer
{
PBYTE dw_BasePointer; // entity pointer from the entity list
PBYTE dw_BoneMatrix; // bone matrix pointer (entity + BoneOffset)
int hp; // health (entity + Healths)
int Team; // team number (entity + MyTeamAD)
float you[3]; // world position of the selected bone (see ReadStructure)
float PlayerPos[3]; // position read from entity + 0x134
float VecView[3]; // offset read from entity + 0x104, then added to PlayerPos
// Zero everything so a failed read does not leave stale target data.
void Clear()
{
this->dw_BasePointer = 0x0;
this->dw_BoneMatrix = 0x0;
this->hp = 0;
this->Team = 0;
this->you[0] = 0;
this->you[1] = 0;
this->you[2] = 0;
}
// Read slot i of the entity list.
// NOTE(review): uses (i - 1) * 16, i.e. a 1-based index, while the aimbot
// loop passes i = 0..63 -- i = 0 reads 16 bytes BEFORE the list. Kept
// as-is; changing the indexing convention would need caller changes too.
void ReadStructure(int i)
{
this->Clear();
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwClient + c_dwBaseEntity + ((i - 1) * 16), &dw_BasePointer, sizeof(dw_BasePointer), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BasePointer + Healths, &hp, sizeof(hp), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BasePointer + MyTeamAD, &Team, sizeof(Team), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BasePointer + BoneOffset, &dw_BoneMatrix, sizeof(dw_BoneMatrix), NULL);
// Each bone matrix row is 48 bytes; the translation components sit at
// byte offsets 12 / 28 / 44 within the row selected by Aimbot.Bone.
ReadProcessMemory(g_Vars.hProcess, dw_BoneMatrix + ((48 * (g_Vars.Aimbot.Bone)) + 12), &you[0], sizeof(you[0]), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BoneMatrix + ((48 * (g_Vars.Aimbot.Bone)) + 28), &you[1], sizeof(you[1]), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BoneMatrix + ((48 * (g_Vars.Aimbot.Bone)) + 44), &you[2], sizeof(you[2]), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BasePointer + 0x134, &PlayerPos, sizeof(PlayerPos), NULL);
ReadProcessMemory(g_Vars.hProcess, dw_BasePointer + 0x104, &VecView, sizeof(VecView), NULL);
VecView[0] = VecView[0] + PlayerPos[0];
VecView[1] = VecView[1] + PlayerPos[1];
VecView[2] = VecView[2] + PlayerPos[2];
// Drops the aim point 65 units below the selected bone -- presumably to
// move the aim from head height toward the body; TODO confirm intent.
you[2] -= 65;
}
} BasePlayer[64];
// Forget the current aim target: clear the cached target position and
// reset the nearest-target search state before a fresh scan.
void DropTarget()
{
	for (int axis = 0; axis < 3; ++axis)
		g_Vars.vAimVectorGeneral[axis] = 0;
	g_Vars.fNearestTarget = 99999.9f;
	g_Vars.iNearestTarget = -1;
}
// Aimbot worker thread: every 5 ms, while the aim key is held, scans all
// 64 entity slots, ranks living enemies inside the configured FOV and
// writes corrected view angles directly into the engine's client state.
DWORD Aimbot(LPVOID pParam)
{
while (true)
{
Sleep(5);
if (g_Vars.Aimbot.Enabled == 1)
{
LocalBasePlayer gLocalBasePlayer;
C_BasePlayer gBasePlayer[65];
gLocalBasePlayer.ReadStructure();
if (GetAsyncKeyState(g_Vars.Aimbot.Key))
{
DropTarget();
for (int i = 0; i < 64; i++)
{
gBasePlayer[i].ReadStructure(i);
if (gBasePlayer[i].hp > 0)
{
if (gBasePlayer[i].Team != gLocalBasePlayer.MyTeam)
{
// 3D distance from the local aim source (i1) to the target bone (you).
g_Vars.Distance = sqrt(((gLocalBasePlayer.i1[0] - gBasePlayer[i].you[0]) * (gLocalBasePlayer.i1[0] - gBasePlayer[i].you[0])) + ((gLocalBasePlayer.i1[1] - gBasePlayer[i].you[1]) * (gLocalBasePlayer.i1[1] - gBasePlayer[i].you[1])) + ((gLocalBasePlayer.i1[2] - gBasePlayer[i].you[2]) * (gLocalBasePlayer.i1[2] - gBasePlayer[i].you[2])));
// Pitch from the height difference; yaw via the custom ATAN2
// (note its swapped x/y argument order). Both in degrees.
g_Vars.Angle[0] = ((asin((gBasePlayer[i].you[2] - gLocalBasePlayer.i1[2]) / g_Vars.Distance) * 180 / M_PI) * -1);
g_Vars.Angle[1] = (g_Math.ATAN2(gBasePlayer[i].you[0] - gLocalBasePlayer.i1[0], gBasePlayer[i].you[1] - gLocalBasePlayer.i1[1]) / M_PI * 180);
// Angular offset between current view and the target angles;
// with RCS on, the doubled aim punch is folded in first.
if (g_Vars.Aimbot.RCS == 0)
{
g_Vars.Aimmin[0] = g_Vars.Angle[0] - (gLocalBasePlayer.MeFov[0]);
g_Vars.Aimmin[1] = g_Vars.Angle[1] - (gLocalBasePlayer.MeFov[1]);
}
else
{
g_Vars.Aimmin[0] = g_Vars.Angle[0] - (gLocalBasePlayer.MeFov[0] + gLocalBasePlayer.recoil[0]);
g_Vars.Aimmin[1] = g_Vars.Angle[1] - (gLocalBasePlayer.MeFov[1] + gLocalBasePlayer.recoil[1]);
}
// Wrap the deltas into [-180, 180] before measuring them.
if (g_Vars.Aimmin[0] > 180) { g_Vars.Aimmin[0] -= 360; }
if (g_Vars.Aimmin[1] > 180) { g_Vars.Aimmin[1] -= 360; }
if (g_Vars.Aimmin[0] < -180) { g_Vars.Aimmin[0] += 360; }
if (g_Vars.Aimmin[1] < -180) { g_Vars.Aimmin[1] += 360; }
g_Vars.vAimVectorGeneral[0] = gBasePlayer[i].you[0];
g_Vars.vAimVectorGeneral[1] = gBasePlayer[i].you[1];
g_Vars.vAimVectorGeneral[2] = gBasePlayer[i].you[2];
g_Vars.GeneralAimbot[0] = gLocalBasePlayer.i1[0] - g_Vars.vAimVectorGeneral[0];
g_Vars.GeneralAimbot[1] = gLocalBasePlayer.i1[1] - g_Vars.vAimVectorGeneral[1];
g_Vars.GeneralAimbot[2] = gLocalBasePlayer.i1[2] - g_Vars.vAimVectorGeneral[2];
// fovdist = angular distance to the target; used for the FOV gate.
g_Vars.fovdist = sqrt((g_Vars.Aimmin[0] * g_Vars.Aimmin[0]) + (g_Vars.Aimmin[1] * g_Vars.Aimmin[1]));
g_Vars.TargetSelection = g_Math.VectorLength(g_Vars.GeneralAimbot);
g_Vars.TargetSelection2 = g_Vars.fovdist;
if (g_Vars.fovdist < g_Vars.Aimbot.FOV / 2)
{
// NOTE(review): world distance and angular distance are compared
// against the SAME threshold here -- pre-existing ranking quirk.
if (g_Vars.TargetSelection < g_Vars.fNearestTarget && g_Vars.TargetSelection2 < g_Vars.fNearestTarget)
{
g_Vars.fNearestTarget = g_Vars.TargetSelection;
g_Vars.iNearestTarget = i;
// Recoil compensation, skipped for weapons flagged as non-RCS.
if (g_Vars.Aimbot.RCS == 1 && !g_Nospread.IsNonRcs(g_Vars.m_iWeaponID))
{
g_Vars.Angle[0] -= gLocalBasePlayer.recoil[0];
g_Vars.Angle[1] -= gLocalBasePlayer.recoil[1];
}
// Smoothing: cap the per-tick angle change at 2 / Smooth degrees
// on each axis, stepping from the current view angles.
if (g_Vars.Aimbot.Smooth > 0.0)
{
float Diff[3];
Diff[0] = g_Vars.Angle[0] - gLocalBasePlayer.flAngle[0];
Diff[1] = g_Vars.Angle[1] - gLocalBasePlayer.flAngle[1];
if (Diff[0] > 180) Diff[0] -= 360;
if (Diff[1] > 180) Diff[1] -= 360;
if (Diff[0] < -180) Diff[0] += 360;
if (Diff[1] <-180) Diff[1] += 360;
if (Diff[0] > 2 / g_Vars.Aimbot.Smooth) { g_Vars.Angle[0] = gLocalBasePlayer.flAngle[0] + 2 / g_Vars.Aimbot.Smooth; }
else if (Diff[0] < -2 / g_Vars.Aimbot.Smooth) { g_Vars.Angle[0] = gLocalBasePlayer.flAngle[0] - 2 / g_Vars.Aimbot.Smooth; }
if (Diff[1] > 2 / g_Vars.Aimbot.Smooth) { g_Vars.Angle[1] = gLocalBasePlayer.flAngle[1] + 2 / g_Vars.Aimbot.Smooth; }
else if (Diff[1] < -2 / g_Vars.Aimbot.Smooth) { g_Vars.Angle[1] = gLocalBasePlayer.flAngle[1] - 2 / g_Vars.Aimbot.Smooth; }
if (g_Vars.Angle[0] > 180) g_Vars.Angle[0] -= 360;
if (g_Vars.Angle[1] > 180) g_Vars.Angle[1] -= 360;
if (g_Vars.Angle[0] < -180) g_Vars.Angle[0] += 360;
if (g_Vars.Angle[1] < -180) g_Vars.Angle[1] += 360;
}
// Never write angles while holding a weapon the aimbot must ignore.
if (g_Nospread.IsBadWeapon(g_Vars.m_iWeaponID))
continue;
g_Vars.Angle[2] = 0.0f;
g_Math.NormalizeAngles(g_Vars.Angle);
// Write the final angles straight into the engine's client state.
WriteProcessMemory(g_Vars.hProcess, gLocalBasePlayer.AngBase + ViewAngOff, &g_Vars.Angle, sizeof(g_Vars.Angle), 0);
}
}
}
}
}
}
}
}
// Triggerbot worker thread: every 5 ms, while the trigger key is held,
// checks which entity is under the crosshair and, if it is a living enemy,
// posts a left-click (press, Sleep1 ms hold, release, Sleep2 ms cooldown).
DWORD Triggerbot(LPVOID pParam)
{
while (true)
{
Sleep(5);
if (g_Vars.Triggerbot.Enabled == 1)
{
if (GetAsyncKeyState(g_Vars.Triggerbot.Key))
{
PBYTE LOCALBASE;
PBYTE BASEPOINTER;
// NOTE(review): ID/HP/TEAM are read as single bytes from what the rest
// of this source treats as 4-byte ints -- this only works on a
// little-endian target and for values < 256. Pre-existing, kept as-is.
byte ID;
byte HP;
byte TEAM;
byte MYTEAM;
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwClient + c_dwLocalBaseEntity, &LOCALBASE, sizeof(LOCALBASE), NULL);
ReadProcessMemory(g_Vars.hProcess, LOCALBASE + OFFSET_CROSSHAIRID, &ID, sizeof(ID), NULL);
if (ID)
{
// Crosshair ID is a 1-based entity index into the 0x10-stride list.
ReadProcessMemory(g_Vars.hProcess, g_Vars.dwClient + c_dwBaseEntity + ((ID - 1) * 16), &BASEPOINTER, sizeof(BASEPOINTER), NULL);
ReadProcessMemory(g_Vars.hProcess, BASEPOINTER + Healths, &HP, sizeof(HP), NULL);
if (HP > 0)
{
ReadProcessMemory(g_Vars.hProcess, BASEPOINTER + MyTeamAD, &TEAM, sizeof(TEAM), NULL);
ReadProcessMemory(g_Vars.hProcess, LOCALBASE + MyTeamAD, &MYTEAM, sizeof(MYTEAM), NULL);
if (TEAM != MYTEAM)
{
// Simulate a full click via window messages rather than input APIs.
PostMessage(g_Vars.HandleWindow, WM_LBUTTONDOWN, MK_LBUTTON, 0);
Sleep(g_Vars.Triggerbot.Sleep1);
PostMessage(g_Vars.HandleWindow, WM_LBUTTONUP, MK_LBUTTON, 0);
Sleep(g_Vars.Triggerbot.Sleep2);
}
}
}
}
}
}
}
bool GetWeaponInfos()
{
switch (g_Vars.m_iWeaponID)
{
case WEAPON_GLOCK:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.glock.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.glock.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.glock.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.glock.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.glock.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.glock.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.glock.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_ELITE:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.elite.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.elite.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.elite.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.elite.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.elite.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.elite.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.elite.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_P250:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p250.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p250.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p250.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p250.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p250.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p250.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p250.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_TEC9:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.tec9.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.tec9.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.tec9.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.tec9.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.tec9.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.tec9.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.tec9.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_DEAGLE:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.deagle.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.deagle.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.deagle.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.deagle.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.deagle.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.deagle.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.deagle.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_HKP2000:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.hkp2000.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.hkp2000.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.hkp2000.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.hkp2000.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.hkp2000.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.hkp2000.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.hkp2000.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_USP:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.usp.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.usp.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.usp.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.usp.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.usp.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.usp.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.usp.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_FIVESEVEN:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.fiveseven.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.fiveseven.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.fiveseven.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.fiveseven.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.fiveseven.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.fiveseven.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.fiveseven.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_CZ75:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.cz75.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.cz75.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.cz75.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.cz75.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.cz75.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.cz75.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.cz75.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_NOVA:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.nova.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.nova.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.nova.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.nova.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.nova.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.nova.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.nova.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_XM1014:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.xm1014.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.xm1014.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.xm1014.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.xm1014.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.xm1014.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.xm1014.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.xm1014.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_SAWEDOFF:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.swadeoff.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.swadeoff.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.swadeoff.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.swadeoff.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.swadeoff.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.swadeoff.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.swadeoff.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_MAG7:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mag7.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mag7.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mag7.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mag7.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mag7.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mag7.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mag7.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_MAC10:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mac10.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mac10.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mac10.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mac10.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mac10.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mac10.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mac10.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mac10.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_MP7:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp7.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp7.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp7.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp7.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp7.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp7.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp7.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp7.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_UMP45:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ump45.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ump45.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ump45.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ump45.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ump45.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ump45.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ump45.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ump45.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_P90:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p90.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p90.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p90.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p90.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.p90.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p90.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p90.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.p90.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_MP9:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp9.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp9.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp9.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp9.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.mp9.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp9.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp9.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.mp9.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_BIZON:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.bizon.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.bizon.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.bizon.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.bizon.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.bizon.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.bizon.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.bizon.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.bizon.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_GALILAR:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.galil.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.galil.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.galil.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.galil.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.galil.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.galil.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.galil.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.galil.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_AK47:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ak.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ak.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ak.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ak.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.ak.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ak.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ak.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.ak.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_SG553:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.sg553.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.sg553.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.sg553.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.sg553.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.sg553.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.sg553.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.sg553.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.sg553.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_M4A4:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a4.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a4.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a4.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a4.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a4.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a4.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a4.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a4.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_M4A1:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a1.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a1.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a1.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a1.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m4a1.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a1.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a1.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m4a1.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_FAMAS:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.famas.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.famas.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.famas.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.famas.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.famas.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.famas.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.famas.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.famas.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_AUG:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.aug.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.aug.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.aug.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.aug.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.aug.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.aug.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.aug.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.aug.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_SSG08:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scout.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scout.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scout.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scout.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scout.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scout.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scout.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_AWP:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.awp.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.awp.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.awp.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.awp.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.awp.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.awp.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.awp.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_G3SG1:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.g3sg1.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.g3sg1.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.g3sg1.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.g3sg1.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.g3sg1.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.g3sg1.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.g3sg1.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_SCAR20:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scar.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scar.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scar.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.scar.bone", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scar.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scar.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.scar.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_M249:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m249.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m249.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m249.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m249.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.m249.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m249.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m249.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.m249.sleep.2", 0, g_Vars.ini);
break;
case WEAPON_NEGEV:
g_Vars.Aimbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.negev.enabled", 0, g_Vars.ini);
g_Vars.Aimbot.FOV = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.negev.fov", 0, g_Vars.ini);
g_Vars.Aimbot.Smooth = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.negev.smooth", 0, g_Vars.ini);
g_Vars.Aimbot.Bone = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.negev.bone", 0, g_Vars.ini);
g_Vars.Aimbot.RCS = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.negev.rcs", 0, g_Vars.ini);
g_Vars.Triggerbot.Enabled = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.negev.enabled", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep1 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.negev.sleep.1", 0, g_Vars.ini);
g_Vars.Triggerbot.Sleep2 = g_Module.GetPrivateProfileFloat("Horcrux", "weapon.triggerbot.negev.sleep.2", 0, g_Vars.ini);
break;
}
return true;
}
// Refreshes the cached game information (process id, game window handle,
// module base addresses) once every 100 calls; all other calls only bump
// the UPD counter. Always returns true so the caller's error branch is
// effectively dormant.
bool Info()
{
	if (UPD == 100)
	{
		InitializeSettings();
		GetWeaponInfos();
		g_Vars.hPID = g_Module.GetProcID("csgo.exe");
		g_Vars.HandleWindow = FindWindowA("Valve001", 0);
		// NOTE(review): the previous process handle is re-opened here without
		// being closed, so one handle leaks per refresh. It is presumably read
		// concurrently by the Aimbot/Triggerbot threads, so closing it in
		// place could race them; left as-is deliberately.
		g_Vars.hProcess = OpenProcess(PROCESS_ALL_ACCESS, false, g_Vars.hPID);
		g_Vars.hSnapshot = CreateToolhelp32Snapshot(TH32CS_SNAPMODULE, g_Vars.hPID);
		g_Vars.dwClient = g_Module.GetModule(g_Vars.hSnapshot, "client_panorama.dll");
		g_Vars.dwEngine = g_Module.GetModule(g_Vars.hSnapshot, "engine.dll");
		// The snapshot is only consumed by the GetModule lookups above; close
		// it here so repeated refreshes no longer leak one snapshot handle
		// each cycle. Any later code that needs a snapshot must re-create it.
		if (g_Vars.hSnapshot != INVALID_HANDLE_VALUE)
		{
			CloseHandle(g_Vars.hSnapshot);
			g_Vars.hSnapshot = INVALID_HANDLE_VALUE;
		}
		UPD = 0;
	}
	UPD = UPD + 1;
	return true;
}
// Entry point: one-time initialization, worker-thread startup, then a
// polling loop that refreshes cached game info and services the settings
// reload key.
int CALLBACK WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow)
{
	// Each one-time initialization step aborts the process with a message
	// box on failure.
	if (!g_Module.SetDebugPrivilege())
	{
		MessageBox(0, "Failed to alloc privileges.", "AnkFEST Aimbot", MB_OK | MB_ICONERROR);
		ExitProcess(0);
	}
	if (!InitializeSettings())
	{
		MessageBox(0, "Failed to parse settings.", "AnkFEST Aimbot", MB_OK | MB_ICONERROR);
		ExitProcess(0);
	}
	if (!GetWeaponInfos())
	{
		MessageBox(0, "Failed to get weapon information.", "AnkFEST Aimbot", MB_OK | MB_ICONERROR);
		ExitProcess(0);
	}
	// Prime the cached process/window/module info before the workers start.
	Info();
	CreateThread(0, 0, (LPTHREAD_START_ROUTINE)Aimbot, 0, 0, 0);
	CreateThread(0, 0, (LPTHREAD_START_ROUTINE)Triggerbot, 0, 0, 0);
	while (1)
	{
		// Hot-reload the configuration while the reload key is pressed.
		// (Fix: this check previously ran exactly once, before the loop,
		// so the reload key could never trigger a reload after startup.)
		if (GetAsyncKeyState(g_Vars.ReloadKey))
		{
			InitializeSettings();
		}
		if (!Info())
		{
			MessageBox(0, "Failed to load game thread.", "AnkFEST Aimbot", MB_OK | MB_ICONERROR);
			ExitProcess(0);
		}
		Sleep(5);
	}
}
#include "H_Include.h"
// Numeric weapon IDs used to identify the active weapon (matched against
// the IDs switched on in GetWeaponInfos and the C_Nospread helpers).
#define WEAPON_NONE 0
// --- melee / utility ---
#define WEAPON_KNIFE 42
#define WEAPON_KNIFEGG 41
#define WEAPON_TASER 31
// --- pistols ---
#define WEAPON_GLOCK 4
#define WEAPON_ELITE 2
#define WEAPON_P250 36
#define WEAPON_TEC9 30
#define WEAPON_DEAGLE 1
#define WEAPON_HKP2000 32
#define WEAPON_USP 61
#define WEAPON_FIVESEVEN 3
#define WEAPON_CZ75 63
// --- shotguns ---
#define WEAPON_NOVA 35
#define WEAPON_XM1014 25
#define WEAPON_SAWEDOFF 29
#define WEAPON_MAG7 27
// --- SMGs ---
#define WEAPON_MAC10 17
#define WEAPON_MP7 33
#define WEAPON_UMP45 24
#define WEAPON_P90 19
#define WEAPON_MP9 34
#define WEAPON_BIZON 26
// --- rifles ---
#define WEAPON_GALILAR 13
#define WEAPON_AK47 7
#define WEAPON_SG553 39
#define WEAPON_M4A4 16
#define WEAPON_M4A1 60
#define WEAPON_FAMAS 10
#define WEAPON_AUG 8
// --- sniper rifles ---
#define WEAPON_SSG08 40
#define WEAPON_AWP 9
#define WEAPON_G3SG1 11
#define WEAPON_SCAR20 38
// --- machine guns ---
#define WEAPON_M249 14
#define WEAPON_NEGEV 28
// --- grenades / bomb ---
#define WEAPON_HE 44
#define WEAPON_FLASH 43
#define WEAPON_SMOKE 45
#define WEAPON_MOLOTOV 46
#define WEAPON_DECOY 47
#define WEAPON_INCGRENADE 48
#define WEAPON_C4 49
// Weapon-category helpers used to decide whether aiming / recoil logic
// should run for the currently held item.
class C_Nospread
{
public:
	// True for items the aim logic should ignore entirely: knives, the
	// empty slot, every grenade, and the bomb.
	bool IsBadWeapon(int weaponid)
	{
		switch (weaponid)
		{
		case WEAPON_KNIFEGG:
		case WEAPON_KNIFE:
		case WEAPON_NONE:
		case WEAPON_FLASH:
		case WEAPON_SMOKE:
		case WEAPON_MOLOTOV:
		case WEAPON_DECOY:
		case WEAPON_INCGRENADE:
		case WEAPON_C4:
			return true;
		default:
			return false;
		}
	}
	// True for the pistols that get no recoil compensation.
	bool IsNonRcs(int weaponid)
	{
		switch (weaponid)
		{
		case WEAPON_DEAGLE:
		case WEAPON_ELITE:
		case WEAPON_FIVESEVEN:
		case WEAPON_GLOCK:
		case WEAPON_P250:
		case WEAPON_HKP2000:
		case WEAPON_USP:
		case WEAPON_TEC9:
			return true;
		default:
			return false;
		}
	}
}; extern C_Nospread g_Nospread;
<file_sep>/AnkFEST/H_Include.h
#pragma once
// ~ Disable warnings ~
#pragma warning(disable: 4244)
#pragma warning(disable: 4305)
#pragma warning(disable: 4996)
#pragma warning(disable: 4715)
// ~ Standard includes ~
#include <Windows.h>
#include <fstream>
#include <TlHelp32.h>
#include <stdio.h>
#include <math.h>
#include <iostream>
#include <cstring>
#include <string>
#include <vector>
using namespace std;
// ~ User includes
#include "H_Offsets.h"
#include "H_Globals.h"
#include "H_Module.h"
#include "H_Math.h"
#include "H_Weapons.h"
|
9c02f04f2c716de361277a62d36f299e9feb2f9d
|
[
"Markdown",
"C",
"C++"
] | 8 |
Markdown
|
FriXeee/CSGO-Legit-Hack
|
67529d34a1ec3e982bd1fddd74f7466bee35eba2
|
8df3e0a92956eaa6953bf1e8dbd084b04b476645
|
refs/heads/master
|
<file_sep>http://dreamthink.github.io/starwarsmadlibs
A Star Wars Mad Libs game using AngularJS.
May the Force be with you...<file_sep>var app = angular.module("myApp", ['ngAnimate']);
app.controller("MyController", function() {
    var self = this;

    // Every mad-lib input field, in document order. Kept in one place so the
    // initializer and resetMadLibs cannot drift apart (previously the same
    // sixteen assignments were duplicated verbatim in both spots).
    var fields = [
        "pluralnoun1", "verb1", "pluralnoun2", "pluralnoun3", "pluralnoun4",
        "noun1", "adjective1", "pluralnoun5", "noun2", "noun3", "noun4",
        "verbEndingInIng", "noun5", "verb2", "adverb1", "noun6"
    ];

    // Set every input field back to an empty string.
    function clearFields() {
        fields.forEach(function(name) {
            self[name] = "";
        });
    }

    // Initial state: blank inputs, input form visible, story hidden.
    clearFields();
    this.showInputSection = true;
    this.showTextSection = false;

    // Swap from the input form to the rendered story, but only when the
    // form passes Angular validation.
    this.showMadLibs = function() {
        if (this.myForm.$valid) {
            this.showInputSection = false;
            this.showTextSection = true;
        }
    };

    // Blank all fields, return to the input form, and clear the form's
    // validation state.
    this.resetMadLibs = function() {
        clearFields();
        this.showTextSection = false;
        this.showInputSection = true;
        this.myForm.$setPristine();
    };
});
|
007edc345a6c35f86387b4be31a06365c4432dbd
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
dreamthink/StarWarsMadLibs
|
1cfd3596721a99e8ac2ad8a89b43d51f143c4500
|
c781e93da0ae281cd2c00001b977a018a40b2d15
|
refs/heads/master
|
<file_sep>import tensorflow as tf
from params import Params
class DDQNet():
    """Dueling Q-network pair (TF1 graph style).

    Builds a trainable 'primary' network and a 'target' network; the target
    copy is used both to pick greedy actions (predict_act) and to bootstrap
    the TD target, and is overwritten from the primary via sync_variables().
    """

    def __init__(self, action_space):
        # Input frame geometry: IMG_X x IMG_Y pixels, IMG_Z stacked channels.
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.IMG_Z = Params['IMG_Z']
        self.action_space = action_space
        self.learning_rate = Params['LEARNING_RATE']
        # Variable-scope names separating the two network copies in the graph.
        self.primary_scope = 'primary'
        self.target_scope = 'target'
        self.reward_discount = 0.99
        # Build the full graph (both networks, loss, ops) immediately.
        self.dueling_nn()

    def dueling_nn(self):
        """Build both networks plus the TD loss/train op, the greedy-action
        op, and the primary->target synchronization ops."""
        with tf.variable_scope(self.primary_scope) as scope:
            self.primary_in, self.primary_out = self.build_nn()
        with tf.variable_scope(self.target_scope) as scope:
            self.target_in, self.target_out = self.build_nn()

        # end_game is 1.0 for terminal transitions (suppresses bootstrapping).
        self.end_game = tf.placeholder(shape=[None],dtype=tf.float32)
        self.current_reward = tf.placeholder(shape=[None],dtype=tf.float32)
        # Integer index of the action taken in each transition.
        self.actions = tf.placeholder(shape=[None],dtype=tf.int32)

        # TD target: r + gamma * max_a' Q_target(s', a'); stop_gradient keeps
        # the optimizer from pushing gradients into the target network.
        next_Q = tf.reduce_max(self.target_out, axis = 1)
        targetQ = self.current_reward + self.reward_discount * tf.multiply(1 - self.end_game, next_Q)
        targetQ = tf.stop_gradient(targetQ)

        # Q(s, a) of the action actually taken, from the primary network.
        actions_onehot = tf.one_hot(self.actions, self.action_space, dtype=tf.float32)
        Q = tf.reduce_sum((self.primary_out * actions_onehot), reduction_indices=1)
        # Mean squared TD error over the batch.
        loss = tf.reduce_mean(tf.square(targetQ - Q))
        # training
        self.update = tf.train.AdamOptimizer(learning_rate = self.learning_rate).minimize(loss)

        # predict action according to the target network
        self.predict = tf.argmax(self.target_out, axis = 1)

        # synchronize two networks: pair variables by creation order (both
        # scopes run the same build_nn) and copy primary -> target.
        from_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.primary_scope)
        to_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.target_scope)
        self.sync_op = []
        for from_var, to_var in zip(from_variables, to_variables):
            self.sync_op.append(to_var.assign(from_var.value()))

    def build_nn(self):
        """Build one conv net with a dueling head.

        Returns (state placeholder, Q-value output tensor of shape
        [batch, action_space]).
        """
        # [batch, in_height, in_width, in_channels]
        # assuming input to be batch_size*84*84*4
        state_in = tf.placeholder(tf.float32, shape=[None, self.IMG_X, self.IMG_Y, self.IMG_Z])
        # Resize incoming frames to the 80x80 grid the conv stack expects.
        state_resized = tf.image.resize_images(state_in, [80, 80])
        ##########################################################
        #[filter_height, filter_width, in_channels, out_channels]
        # conv layer 1, 8*8*32 filters, 4 stride
        conv1_W = tf.Variable(tf.truncated_normal([8, 8, self.IMG_Z, 32], stddev = 0.01))
        conv1_b = tf.Variable(tf.truncated_normal([1, 20, 20, 32], stddev = 0.01))
        conv1_strides = [1, 4, 4, 1]
        #output 20*20*32
        conv1_out = tf.nn.conv2d(state_resized, conv1_W, conv1_strides,
                                 padding = 'SAME') + conv1_b
        conv1_out = tf.nn.relu(conv1_out)
        ###########################################################
        # conv layer 2, 4*4*64 filters, 2 stride
        conv2_W = tf.Variable(tf.truncated_normal([4, 4, 32, 64], stddev = 0.01))
        conv2_b = tf.Variable(tf.truncated_normal([1, 9, 9, 64], stddev = 0.01))
        conv2_strides = [1, 2, 2, 1]
        # output 9*9*64
        conv2_out = tf.nn.conv2d(conv1_out, conv2_W, conv2_strides,
                                 padding = 'VALID') + conv2_b
        conv2_out = tf.nn.relu(conv2_out)
        ###########################################################
        # fully connected layer 1, (7*7*64 = 3136) * 512
        # NOTE(review): the header comment above is stale -- with 80x80 input
        # the conv output is 9*9*64 = 5184, which is what is used below.
        ff1_input = tf.reshape(conv2_out, [-1, 5184])
        ff1_W = tf.Variable(tf.truncated_normal([5184, 256], stddev = 0.01))
        ff1_b = tf.Variable(tf.truncated_normal([1, 256], stddev = 0.01))
        # output batch_size * 512
        ff1_out = tf.matmul(ff1_input, ff1_W) + ff1_b
        ff1_out = tf.nn.relu(ff1_out)
        # Dueling head: split the 256 features into two 128-wide streams,
        # one for the advantage A(s,a), one for the state value V(s).
        advantage_in, value_in = tf.split(ff1_out, 2, axis = 1)
        advantage_W = tf.Variable(tf.truncated_normal([128, self.action_space], stddev = 0.01))
        value_W = tf.Variable(tf.truncated_normal([128, 1], stddev = 0.01))
        advantage_out = tf.matmul(advantage_in, advantage_W)
        value_out = tf.matmul(value_in, value_W)
        #Then combine them together to get our final Q-values.
        # Q(s,a) = V(s) + A(s,a) - mean_a A(s,a): mean-centered advantages.
        Q_out = value_out + advantage_out - tf.reduce_mean(advantage_out,reduction_indices=1,keep_dims=True)
        return state_in, Q_out

    def sync_variables(self, sess):
        """Run the copy ops that overwrite target weights with primary's."""
        # adding scope to network
        sess.run(self.sync_op)

    def train(self, sess, state_current, state_future, action, reward, end_game):
        """One gradient step on a batch of (s, a, r, s', done) transitions."""
        sess.run(self.update, feed_dict={self.target_in: state_future,
                                         self.primary_in: state_current,
                                         self.actions: action,
                                         self.current_reward: reward,
                                         self.end_game: end_game})

    def predict_act(self, sess, state):
        """Return greedy action(s) for `state` per the target network."""
        # 1X80X80X4 single image
        action = sess.run(self.predict,
                          feed_dict = {self.target_in: state})
        return action
<file_sep>import pickle
import os
import tensorflow as tf
import numpy as np
from params import Params
class Logger(object):
    """Tracks per-episode rewards, keeps an exponential running average, and
    periodically checkpoints both the TF model and the reward history."""

    def __init__(self, sess, saver):
        # Number of episodes logged so far.
        self.global_episode = 0
        # Exponential running average of rewards (None until the first log).
        self.running_reward = None
        self.save_freq = Params['SAVE_FREQ']
        self.save_path = Params['SAVE_PATH']
        self.__saver = saver
        self.__sess = sess
        # One running-average sample appended every 10 episodes.
        self.reward_log = []
        if not os.path.exists(self.save_path):
            os.makedirs(self.save_path)

    def log(self, reward_sum):
        """Record one finished episode's total reward; print progress and
        checkpoint every `save_freq` episodes."""
        self.running_reward = reward_sum if self.running_reward is None else self.running_reward * 0.99 + reward_sum * 0.01
        self.global_episode += 1
        if self.global_episode % 10 == 0:
            self.reward_log.append(self.running_reward)
            # Fix: '{:3f}' was a width spec, not a precision spec.
            print('Ep {}: reward: {}, running average: {:.3f}'.format(self.global_episode, reward_sum, self.running_reward))
        else:
            print('Ep {}: reward: {}'.format(self.global_episode, reward_sum))
        if self.global_episode % self.save_freq == 0:
            self.save()

    def save(self):
        """Persist the reward history and a numbered model checkpoint."""
        # `with` guarantees the file is closed even if pickling fails.
        with open(self.save_path + 'reward_log.cptk', 'wb') as f:
            pickle.dump(self.reward_log, f)
        self.__saver.save(self.__sess, self.save_path + 'model-' + str(self.global_episode) + '.cptk')

    def restore(self):
        """Try to restore the latest checkpoint and reward history.

        Returns:
            True on success, False when nothing could be restored (missing
            checkpoint, missing reward log, or an incompatible graph).
        """
        try:
            ckpt = tf.train.get_checkpoint_state(self.save_path)
            load_path = ckpt.model_checkpoint_path
            self.__saver.restore(self.__sess, load_path)
            with open(self.save_path + 'reward_log.cptk', 'rb') as f:
                self.reward_log = pickle.load(f)
            # Seed the running average from the most recent logged samples
            # (each sample is itself a running average taken every 10 eps).
            self.running_reward = np.mean(self.reward_log[-10:])
            print('Network variables restored!')
            return True
        except Exception:
            # Best-effort restore: `except Exception` (instead of a bare
            # except) no longer swallows KeyboardInterrupt / SystemExit.
            print('Cannot restore variables')
            return False
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 12 21:19:09 2017
@author: shengx
"""
import tensorflow as tf
import numpy as np
import random
import gym
import pickle
import os
# Hyper-parameter table shared by the DQN modules (imported elsewhere as
# `from params import Params`). Comments give each knob's conventional
# meaning; the authoritative usage is in the agent/network modules.
Params = {
    'GAME': 'Pong-v0',                   # Gym environment id
    'LEARNING_RATE': 0.00025,            # optimizer step size
    'BATCH_SIZE': 32,                    # transitions per gradient step
    'REWARD_DISCOUNT': 0.99,             # discount factor gamma
    'RANDOM_ACTION_PROB_START': 0.9,     # initial exploration epsilon
    'RANDOM_ACTION_PROB_END': 0.1,       # final exploration epsilon
    'ANNEALING_STEP': 50000,             # steps over which epsilon is annealed (presumably linearly -- confirm in agent code)
    'FRAME_SKIP': 2,                     # env steps each chosen action is repeated for
    'SYNC_FREQ': 2000,                   # steps between primary->target network syncs (per name -- confirm usage)
    'UPDATE_FREQ': 4,                    # env steps per training update (per name -- confirm usage)
    'SAVE_FREQ': 1000,                   # episodes between checkpoints
    'MEMORY_BUFFER_SIZE': 20000,         # replay-buffer capacity in transitions
    'SAVE_PATH': './log/',               # checkpoint / reward-log directory
    'IMG_X': 105,                        # frame height after 2x downsampling (210/2)
    'IMG_Y': 80,                         # frame width after 2x downsampling (160/2)
    'IMG_Z': 4}                          # stacked frames per state
<file_sep>#!/bin/bash -l
# Specify the project name
#$-P dlearn
# Specify the time limit
#$-l h_rt=48:00:00
# Job Name
#$-N medium
# Send email at the end of the job
#$-m ae
# Join error and output streams
#$-j y
# Specify the number of cores
#$-pe omp 4
#gpu requirement
#$-l gpus=1
#gpu capability
#-l gpu_c=2.5
#Load modules:
module load cuda/8.0
module load cudnn/5.1
module load python/3.6.0
module load tensorflow/r1.0_python-3.6.0
#Run the program
#Run a model on smallGrid layout for 6000 episodes, of which 5000 episodes are used for training
#python3 pacman.py -p PacmanDQN -n 6000 -x 5000 -l smallGrid
python3 pacman.py -p PacmanDQN -n 4000 -x 3800 -l mediumClassic -q --numHistory 4
<file_sep>import tensorflow as tf
import numpy as np
import gym
from dpn import DPNet
from logger import Logger
from params import Params
class Agent():
    """Policy-gradient agent: plays a Gym Atari game with the DPNet policy
    network and updates the policy once per finished episode, logging and
    checkpointing through Logger."""

    def __init__(self):
        self.env = gym.make(Params['GAME'])
        # setting up parameters
        self.frame_skip = Params['FRAME_SKIP']
        self.reward_discount = Params['REWARD_DISCOUNT']
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.action_space = self.env.action_space.n
        self.updates = 0
        self.nn = DPNet(self.action_space)
        # initialize variables
        self.sess = tf.Session()
        self.saver = tf.train.Saver()
        self.sess.run(tf.global_variables_initializer())
        # restore variables
        self.logger = Logger(self.sess, self.saver)
        self.logger.restore()

    def run(self):
        """Train forever: play one episode, then update the policy on the
        episode's (state, action, reward) sequences."""
        while True:
            reward_sum = 0
            observation = self.env.reset()
            state_sequence = []
            action_sequence = []
            reward_sequence = []
            # Rolling buffer of the last 4 downsampled grayscale frames.
            state = np.zeros((self.IMG_X, self.IMG_Y, 4), dtype = 'float32')
            state[:,:,-1] = self.process_frame(observation)
            while True:
                # select an action based on the predicted policy
                # Policy input is the difference of the two newest frames
                # (captures motion), with a trailing channel axis.
                current_state = np.expand_dims(state[:,:,-1] - state[:,:,-2], axis = 2)
                observation, action, reward, done = self.take_action(current_state)
                reward_sum += reward
                # save the current state
                state_sequence.append(current_state)
                action_sequence.append(action)
                reward_sequence.append(reward)
                # update the new state and reward
                state = np.roll(state, -1, axis = 2)
                state[:, :, -1] = self.process_frame(observation)
                # save the model after every 200 updates
                if done:
                    self.update_nn(state_sequence, action_sequence, reward_sequence)
                    self.logger.log(reward_sum)
                    break

    def take_action(self, current_state):
        """Sample an action from the policy for `current_state`, repeat it
        for up to `frame_skip` env steps, and return
        (observation, action, summed reward, done)."""
        # take an action according to the policy
        action_policy = self.nn.predict_policy(self.sess, np.expand_dims(current_state, axis = 0))
        action = np.random.choice(self.action_space, p=np.squeeze(action_policy))
        # excute the action for a few steps
        reward = 0
        for _ in range(self.frame_skip):
            observation, reward_temp, done, info = self.env.step(action)
            reward += reward_temp
            if done:
                break
        return (observation, action, reward, done)

    def update_nn(self, states, actions, rewards):
        """Run one policy-gradient update on a full episode."""
        # calculate future discounted rewards
        # The discounted-return trace resets at every nonzero reward
        # (point boundaries in Pong-style scoring).
        future_rewards = np.zeros((len(rewards)))
        running_add = 0
        for t in reversed(range(0, len(rewards))):
            if rewards[t] != 0: running_add = 0
            running_add = running_add * self.reward_discount + rewards[t]
            future_rewards[t] = running_add
        self.nn.train(self.sess, states, actions, future_rewards)

    def test(self):
        """Play episodes with rendering and no learning.

        NOTE(review): this passes the whole pre-batched 4-frame stack to
        take_action, while run() passes a single un-batched difference
        frame; take_action then adds another batch axis, so the network
        input shapes differ between the two paths -- verify before relying
        on test().
        """
        while True:
            observation = self.env.reset()
            state = np.zeros((1, self.IMG_X, self.IMG_Y, 4), dtype = 'float32')
            state[0, :,:,-1] = self.process_frame(observation)
            while True:
                self.env.render()
                # select an action based on the predicted policy
                observation, action, reward, done = self.take_action(state)
                # update the new state and reward
                state = np.roll(state, -1, axis = 3)
                state[0, :, :, -1] = self.process_frame(observation)
                # save the model after every 200 updates
                if done:
                    break

    def process_frame(self, frame):
        """Downsample an RGB frame 2x in each spatial dimension and convert
        to grayscale floats in [0, 1); 210x160 input yields shape (105, 80)."""
        #frame_gray = frame * np.array(([0.21, 0.72, 0.07])) / 256
        # output shape 105X80
        return np.mean(frame[::2,::2], axis = 2, dtype = 'float32') / 256

    def reset_game(self):
        # Placeholder -- no per-game state to reset yet.
        pass
<file_sep># Modified version of: https://github.com/mrkulk/deepQN_tensorflow
import numpy as np
import tensorflow as tf
class SimpleQ:
    """Two-layer fully-connected Q-network (TF1 graph style) for the Pacman
    DQN agent: builds the graph, an RMSProp training op, and checkpointing."""

    def __init__(self, params):
        # `params` is a plain dict of hyper-parameters: width/height/history,
        # discount, lr, rms_decay, rms_eps, load_file, save_file, ...
        self.params = params
        self.network_name = 'qnet'
        self.sess = tf.Session()
        #self.x = tf.placeholder('float', [None, params['width'],params['height'],1],name=self.network_name + '_x')
        # State input: a stack of `history` game-grid frames per sample.
        self.x = tf.placeholder('float', [None, params['height'],params['width'],params['history']],name=self.network_name + '_x')
        # max_a' Q(s',a') values computed by a previous forward pass (fed in
        # by train() below).
        self.q_t = tf.placeholder('float', [None], name=self.network_name + '_q_t')
        # One-hot encoding of the action taken (4 possible moves).
        self.actions = tf.placeholder("float", [None, 4], name=self.network_name + '_actions')
        self.rewards = tf.placeholder("float", [None], name=self.network_name + '_rewards')
        # 1.0 for terminal transitions (disables bootstrapping in `yj`).
        self.terminals = tf.placeholder("float", [None], name=self.network_name + '_terminals')
        o2_shape = self.x.get_shape().as_list()

        # Layer 1 (Fully connected)
        layer_name = 'fc1' ; hiddens = 256 ; dim = o2_shape[1]*o2_shape[2]*o2_shape[3]
        self.o2_flat = tf.reshape(self.x, [-1,dim],name=self.network_name + '_'+layer_name+'_input_flat')
        self.w3 = tf.Variable(tf.random_normal([dim,hiddens], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b3 = tf.Variable(tf.constant(0.1, shape=[hiddens]),name=self.network_name + '_'+layer_name+'_biases')
        self.ip3 = tf.add(tf.matmul(self.o2_flat,self.w3),self.b3,name=self.network_name + '_'+layer_name+'_ips')
        self.o3 = tf.nn.relu(self.ip3,name=self.network_name + '_'+layer_name+'_activations')

        # Layer 2
        layer_name = 'fc2' ; hiddens = 4 ; dim = 256
        self.w4 = tf.Variable(tf.random_normal([dim,hiddens], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b4 = tf.Variable(tf.constant(0.1, shape=[hiddens]),name=self.network_name + '_'+layer_name+'_biases')
        # Final linear layer: one Q-value per action.
        self.y = tf.add(tf.matmul(self.o3,self.w4),self.b4,name=self.network_name + '_'+layer_name+'_outputs')

        #Q,Cost,Optimizer
        self.discount = tf.constant(self.params['discount'])
        # TD target: r + (1 - terminal) * gamma * max_a' Q(s',a').
        self.yj = tf.add(self.rewards, tf.multiply(1.0-self.terminals, tf.multiply(self.discount, self.q_t)))
        # Q-value of the action actually taken.
        self.Q_pred = tf.reduce_sum(tf.multiply(self.y,self.actions), reduction_indices=1)
        # Summed squared TD error over the batch.
        self.cost = tf.reduce_sum(tf.pow(tf.subtract(self.yj, self.Q_pred), 2))

        if self.params['load_file'] is not None:
            # Resume the global step counter from the checkpoint filename
            # (expected to end in ..._<step>).
            self.global_step = tf.Variable(int(self.params['load_file'].split('_')[-1]),name='global_step', trainable=False)
        else:
            self.global_step = tf.Variable(0, name='global_step', trainable=False)
        self.rmsprop = tf.train.RMSPropOptimizer(self.params['lr'],self.params['rms_decay'],0.0,self.params['rms_eps']).minimize(self.cost,global_step=self.global_step)
        self.saver = tf.train.Saver(max_to_keep=0)

        self.sess.run(tf.initialize_all_variables())

        if self.params['load_file'] is not None:
            print('Loading checkpoint...')
            self.saver.restore(self.sess,self.params['load_file'])

    def train(self,states,actions,rewards,nstate,terminals):
        """One RMSProp step on a batch of transitions.

        First forward-passes `nstate` to obtain max_a' Q(s',a'), then trains
        on `states` against that bootstrapped target.
        Returns (global step count, batch cost).
        """
        feed_dict={self.x: nstate, self.q_t: np.zeros(nstate.shape[0]), self.actions: actions, self.terminals:terminals, self.rewards: rewards}
        q_t = self.sess.run(self.y,feed_dict=feed_dict)
        q_t = np.amax(q_t, axis=1)
        feed_dict={self.x: states, self.q_t: q_t, self.actions: actions, self.terminals:terminals, self.rewards: rewards}
        _,cnt,cost = self.sess.run([self.rmsprop,self.global_step,self.cost],feed_dict=feed_dict)
        return cnt, cost

    def save_ckpt(self,filename):
        # NOTE(review): `filename` is ignored -- the checkpoint always goes
        # to params['save_file']; confirm whether that is intentional.
        self.saver.save(self.sess, self.params['save_file'])
<file_sep>import tensorflow as tf
from params import Params
import resource
class A3CNet():
    def __init__(self, scope, action_space, session, optimizer, global_scope = None):
        """Build one A3C network under variable scope `scope`.

        When `global_scope` is given (i.e. this is a worker), also create:
        (a) an op applying this network's gradients to the global network's
        variables, and (b) ops copying the global variables into this local
        network (stored in self.sync_op).
        """
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.IMG_Z = Params['IMG_Z']
        self.entropy_penalty = Params['ENTROPY_PENALTY']
        self.learning_rate = Params['LEARNING_RATE']
        # NOTE(review): entropy_reg duplicates entropy_penalty (same key).
        self.entropy_reg = Params['ENTROPY_PENALTY']
        # Hidden-state width of the recurrent layer.
        self.rnn_h_units = Params['RNN_H_UNITS']
        self.__scope = scope
        self.__sess = session
        self.action_space = action_space

        with tf.variable_scope(self.__scope):
            # local_dict carries the graph handles built by build_nn;
            # 'gradients' and 'variables' keys are consumed below.
            self.local_dict = self.build_nn()

        # apply gradients and sync variables
        if global_scope is not None:
            global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=global_scope)
            self.apply_gradient = optimizer.apply_gradients(zip(self.local_dict['gradients'], global_vars))
            self.sync_op = []
            for from_var, to_var in zip(global_vars, self.local_dict['variables']):
                self.sync_op.append(to_var.assign(from_var.value()))
def build_nn(self):
# [batch, in_height, in_width, in_channels]
# assuming input to be batch_size*84*84*4
state_in = tf.placeholder(tf.float32, shape=[None, self.IMG_X, self.IMG_Y, self.IMG_Z])
state_resized = tf.image.resize_images(state_in, [80, 80])
##########################################################
#[filter_height, filter_width, in_channels, out_channels]
# conv layer 1, 8*8*32 filters, 4 stride
conv1_W = tf.Variable(tf.truncated_normal([8, 8, self.IMG_Z, 32],
stddev = 0.01))
conv1_b = tf.Variable(tf.truncated_normal([1, 20, 20, 32],
stddev = 0.01))
conv1_strides = [1, 4, 4, 1]
#output 20*20*32
conv1_out = tf.nn.conv2d(state_resized, conv1_W, conv1_strides, padding = 'SAME') + conv1_b
conv1_out = tf.nn.relu(conv1_out)
###########################################################
# conv layer 2, 4*4*64 filters, 2 stride
conv2_W = tf.Variable(tf.truncated_normal([4, 4, 32, 64], stddev = 0.01))
conv2_b = tf.Variable(tf.truncated_normal([1, 9, 9, 64], stddev = 0.01))
conv2_strides = [1, 2, 2, 1]
# output 9*9*64
conv2_out = tf.nn.conv2d(conv1_out, conv2_W, conv2_strides, padding = 'VALID') + conv2_b
conv2_out = tf.nn.relu(conv2_out)
###########################################################
# fully connected layer 1, (7*7*64 = 3136) * 512
ff1_input = tf.reshape(conv2_out, [-1, 5184])
ff1_W = tf.Variable(tf.truncated_normal([5184, self.rnn_h_units], stddev = 0.01))
ff1_b = tf.Variable(tf.truncated_normal([1, self.rnn_h_units], stddev = 0.01))
# output batch_size * 512
ff1_out = tf.matmul(ff1_input, ff1_W) + ff1_b
ff1_out = tf.nn.relu(ff1_out)
############################################################
# recurrent layer
rnn_in = tf.reshape(ff1_out, [1, -1, self.rnn_h_units])
rnn_cell = tf.contrib.rnn.GRUCell(num_units = self.rnn_h_units)
rnn_h_in = tf.placeholder(shape=[None, self.rnn_h_units],dtype=tf.float32)
rnn, rnn_state_out = tf.nn.dynamic_rnn(inputs=rnn_in,
cell=rnn_cell,
dtype=tf.float32,
initial_state=rnn_h_in)
rnn_out = tf.reshape(rnn, [-1, self.rnn_h_units])
############################################################
# output layer
policy_W = tf.Variable(tf.truncated_normal([self.rnn_h_units, self.action_space], stddev = 0.01))
policy_b = tf.Variable(tf.truncated_normal([1, self.action_space], stddev = 0.01))
policy_out = tf.nn.softmax(tf.matmul(rnn_out, policy_W) + policy_b)
value_W = tf.Variable(tf.truncated_normal([self.rnn_h_units, 1], stddev = 0.01))
value_b = tf.Variable(tf.truncated_normal([1, 1], stddev = 0.01))
value_out = tf.matmul(rnn_out, value_W) + value_b
###########################################################
# prediction, loss, and update
actions = tf.placeholder(shape=[None],dtype=tf.int32)
R = tf.placeholder(shape= [None], dtype=tf.float32)
actions_onehot = tf.one_hot(actions, self.action_space, dtype=tf.float32)
action_policy = tf.reduce_sum(policy_out * actions_onehot, axis = 1)
policy_loss = -tf.log(action_policy + 1e-6) * (R - tf.stop_gradient(tf.reshape(value_out,[-1])))
V_loss = 0.5 * tf.square(R - tf.reshape(value_out,[-1]))
entropy = policy_out * tf.log(policy_out + 1e-6)
total_loss = tf.reduce_sum(policy_loss) + tf.reduce_sum(V_loss) + self.entropy_reg * tf.reduce_sum(entropy)
##########################################################
# updates
variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.__scope)
grad = tf.gradients(total_loss, variables)
model_dict = {'state_in': state_in, 'action_in': actions, 'R_in': R, 'rnn_h_in': rnn_h_in,
'policy_out': policy_out, 'value_out': value_out, 'rnn_state_out': rnn_state_out,
'gradients': grad, 'variables': variables}
return model_dict
def variable_list(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.__scope.name)
def predict_value(self, state, rnn_state):
#a1=resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
value = self.__sess.run(self.local_dict['value_out'],
feed_dict = {self.local_dict['state_in']: state,
self.local_dict['rnn_h_in']: rnn_state})
#print('9id: %d, Memory usage: %s (kb)' % (1,resource.getrusage(resource.RUSAGE_SELF).ru_maxrss-a1))
return value
def predict_policy(self, state, rnn_state):
# 1X80X80X4 single image
#a1=resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
policy, rnn_state = self.__sess.run([self.local_dict['policy_out'], self.local_dict['rnn_state_out']],
feed_dict = {self.local_dict['state_in']: state,
self.local_dict['rnn_h_in']: rnn_state})
#print('9id: %d, Memory usage: %s (kb)' % (1,resource.getrusage(resource.RUSAGE_SELF).ru_maxrss-a1))
return policy, rnn_state
def sync_variables(self):
self.__sess.run(self.sync_op)
def update_global(self, state, action, R, rnn_state_in):
#a1=resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
self.__sess.run(self.apply_gradient,
feed_dict = {
self.local_dict['state_in']: state,
self.local_dict['action_in']: action,
self.local_dict['rnn_h_in']: rnn_state_in,
self.local_dict['R_in']: R})
#print('9id: %d, Memory usage: %s (kb)' % (1,resource.getrusage(resource.RUSAGE_SELF).ru_maxrss-a1))
<file_sep>#%%
# Entry point: build the learning agent and start training.
from agent import Agent

# NOTE(review): the A3C Agent.__init__ seen elsewhere in this project takes
# (id, scope_name, sess, logger, optimizer, ...) -- confirm this zero-arg
# construction targets the intended agent implementation.
a = Agent()
a.run()
<file_sep>import numpy as np
import random
class replayMemory():
    """Fixed-size circular buffer of transitions for DQN experience replay.

    Each slot stores 5 stacked frames: channels 0:4 are the current state
    and channels 1:5 are the next state (they share 3 frames).
    """

    def __init__(self, IMG_X, IMG_Y, size):
        # [i, :, :, 0:4] is the current state
        # [i, :, :, 1:5] is the next state
        self.frames = np.zeros((size, IMG_X, IMG_Y, 5), dtype = 'float32')
        self.actions = np.zeros((size), dtype = 'int32')
        self.rewards = np.zeros((size), dtype = 'float32')
        self.done = np.zeros((size), dtype = 'int32')
        self.__counter = 0   # next write position (wraps around)
        self.__size = size

    def add(self, state, action, reward, done):
        """Store one transition, overwriting the oldest slot when full."""
        self.frames[self.__counter, :, :, : ] = state
        self.actions[self.__counter] = action
        self.rewards[self.__counter] = reward
        self.done[self.__counter] = done
        self.__counter += 1
        self.__counter = self.__counter % self.__size

    def makeBatch(self, batch_size):
        # randomly sample a batch
        # NOTE(review): sampling covers the whole buffer, including slots
        # not yet written when fewer than `size` transitions have been
        # added -- callers appear to pre-fill the buffer first; confirm.
        idx = random.sample(range(self.__size), batch_size)
        return (self.frames[idx, :, :, 0:4], self.frames[idx, :, :, 1:5], self.actions[idx], self.rewards[idx], self.done[idx])<file_sep># Deep Reinforcement Learning for Playing Atari Games
Final project for BU EC500 K1/CS591 S2 Deep Learning
Source code: https://github.com/xiao281926365/Deep-Reinforcement-Learning-for-Playing-Atari-Games.
## Implemented Methods: ###
1. Deep Policy Network
2. Dueling Double Deep Q Network
3. Dueling Double Deep Q Network with LSTM
4. Asynchronous Advantage Actor Critic
5. Asynchronous Advantage Actor Critic with GRU
## Requirement: ###
* Python 3.6
* [Tensorflow 1.0](https://www.tensorflow.org/install/install_linux)
* [OpenAI Gym](https://github.com/openai/gym)
## To Run: ###
To train the model:
```bash
python3 main.py
```
To train other games or to change model parameters, edit the corresponding params.py file.
## Reference: ###
* [Simple Reinforcement Learning with Tensorflow](https://medium.com/emergent-future/simple-reinforcement-learning-with-tensorflow-part-0-q-learning-with-tables-and-neural-networks-d195264329d0)
* [Human-level control through deep reinforcement learning](http://www.nature.com/nature/journal/v518/n7540/full/nature14236.html)
* [Dueling Network Architectures for Deep Reinforcement Learning](https://arxiv.org/abs/1511.06581)
* [Asynchronous Methods for Deep Reinforcement Learning](https://arxiv.org/abs/1602.01783)
* [Reinforcement Learning through Asynchronous Advantage Actor-Critic on a GPU](https://arxiv.org/abs/1611.06256)<file_sep>import tensorflow as tf
from params import Params
class DDQNet():
    """Dueling Double Deep Q-Network with an LSTM layer (DRQN-style).

    Builds two identical networks ('primary' and 'target'); the primary
    is trained, the target provides bootstrap Q-values, and sync_variables
    copies primary -> target.
    """

    def __init__(self, action_space):
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.action_space = action_space
        self.learning_rate = Params['LEARNING_RATE']
        self.rnn_h_units = Params['RNN_H_UNIT']
        self.primary_scope = 'primary'
        self.target_scope = 'target'
        self.reward_discount = 0.99   # gamma
        self.dueling_nn()

    def dueling_nn(self):
        """Build both networks plus the loss, update, predict and sync ops."""
        with tf.variable_scope(self.primary_scope) as scope:
            self.primary_dict = self.build_nn()
        with tf.variable_scope(self.target_scope) as scope:
            self.target_dict = self.build_nn()
        self.end_game = tf.placeholder(shape=[None],dtype=tf.float32)        # 1.0 if episode ended
        self.current_reward = tf.placeholder(shape=[None],dtype=tf.float32)
        self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
        self.trainLength = tf.placeholder(tf.int32)                          # RNN trace length
        # Bellman target from the target network; no bootstrap at episode end.
        next_Q = tf.reduce_max(self.target_dict['Q_out'], axis = 1)
        targetQ = self.current_reward + self.reward_discount * tf.multiply(1 - self.end_game, next_Q)
        targetQ = tf.stop_gradient(targetQ)
        actions_onehot = tf.one_hot(self.actions, self.action_space, dtype=tf.float32)
        Q = tf.reduce_sum((self.primary_dict['Q_out'] * actions_onehot), reduction_indices=1)
        # DRQN trick: mask out the first half of every trace so the loss is
        # only taken where the LSTM has accumulated enough history.
        maskA = tf.zeros([self.primary_dict['batch_size_in'], self.trainLength//2])
        maskB = tf.ones([self.primary_dict['batch_size_in'], self.trainLength//2])
        mask = tf.concat([maskA,maskB],1)
        mask = tf.reshape(mask,[-1])
        masked_loss = tf.multiply(targetQ - Q, mask)
        loss = tf.reduce_mean(tf.square(masked_loss))
        # training
        self.update = tf.train.AdamOptimizer(learning_rate = self.learning_rate).minimize(loss)
        # greedy action from the primary network
        self.predict = tf.argmax(self.primary_dict['Q_out'], axis = 1)
        # synchronize two networks (primary -> target)
        from_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.primary_scope)
        to_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.target_scope)
        self.sync_op = []
        for from_var, to_var in zip(from_variables, to_variables):
            self.sync_op.append(to_var.assign(from_var.value()))

    def build_nn(self):
        """Build conv -> fc -> LSTM -> dueling (advantage/value) Q head.

        Returns a dict of placeholders/tensors used at run time.
        """
        # [batch, in_height, in_width, in_channels], single-channel frames
        state_in = tf.placeholder(tf.float32, shape=[None, self.IMG_X, self.IMG_Y, 1])
        state_resized = tf.image.resize_images(state_in, [80, 80])
        ##########################################################
        # conv layer 1: 8x8, 32 maps, stride 4 -> 20x20x32
        # (bias is per-position, [1,20,20,32], not per-channel)
        conv1_W = tf.Variable(tf.truncated_normal([8, 8, 1, 32], stddev = 0.01))
        conv1_b = tf.Variable(tf.truncated_normal([1, 20, 20, 32], stddev = 0.01))
        conv1_strides = [1, 4, 4, 1]
        conv1_out = tf.nn.conv2d(state_resized, conv1_W, conv1_strides,
                                 padding = 'SAME') + conv1_b
        conv1_out = tf.nn.relu(conv1_out)
        ###########################################################
        # conv layer 2: 4x4, 64 maps, stride 2, VALID -> 9x9x64
        conv2_W = tf.Variable(tf.truncated_normal([4, 4, 32, 64], stddev = 0.01))
        conv2_b = tf.Variable(tf.truncated_normal([1, 9, 9, 64], stddev = 0.01))
        conv2_strides = [1, 2, 2, 1]
        conv2_out = tf.nn.conv2d(conv1_out, conv2_W, conv2_strides,
                                 padding = 'VALID') + conv2_b
        conv2_out = tf.nn.relu(conv2_out)
        ###########################################################
        # fully connected layer: 9*9*64 = 5184 inputs -> rnn_h_units
        ff1_input = tf.reshape(conv2_out, [-1, 5184])
        ff1_W = tf.Variable(tf.truncated_normal([5184, self.rnn_h_units], stddev = 0.01))
        ff1_b = tf.Variable(tf.truncated_normal([1, self.rnn_h_units], stddev = 0.01))
        ff1_out = tf.nn.relu(tf.matmul(ff1_input, ff1_W) + ff1_b)
        ############################################################
        # recurrent layer: frames reshaped to [batch, trace_len, units]
        batch_size = tf.placeholder(tf.int32)
        rnn_in = tf.reshape(ff1_out, [batch_size, -1, self.rnn_h_units])
        rnn_cell = tf.contrib.rnn.core_rnn_cell.LSTMCell(num_units = self.rnn_h_units)
        rnn_c_in = tf.placeholder(shape=[None, self.rnn_h_units],dtype=tf.float32)
        rnn_h_in = tf.placeholder(shape=[None, self.rnn_h_units],dtype=tf.float32)
        rnn_state_in = tf.contrib.rnn.LSTMStateTuple(rnn_c_in, rnn_h_in)
        rnn, rnn_state_out = tf.nn.dynamic_rnn(inputs=rnn_in,
                                               cell=rnn_cell,
                                               dtype=tf.float32,
                                               initial_state=rnn_state_in)
        rnn_out = tf.reshape(rnn, [-1, self.rnn_h_units])
        ##############################################################
        # dueling split: half the RNN output feeds advantage, half value.
        # NOTE(review): the [128, ...] weight shapes assume
        # RNN_H_UNIT == 256 -- confirm against params.py.
        advantage_in, value_in = tf.split(rnn_out, 2, axis = 1)
        advantage_W = tf.Variable(tf.truncated_normal([128, self.action_space], stddev = 0.01))
        value_W = tf.Variable(tf.truncated_normal([128, 1], stddev = 0.01))
        advantage_out = tf.matmul(advantage_in, advantage_W)
        value_out = tf.matmul(value_in, value_W)
        #Then combine them together to get our final Q-values.
        Q_out = value_out + advantage_out - tf.reduce_mean(advantage_out,reduction_indices=1,keep_dims=True)
        model_dict = {'state_in': state_in, 'rnn_in': (rnn_c_in, rnn_h_in), 'Q_out':Q_out,
                      'rnn_out':rnn_state_out, 'batch_size_in': batch_size}
        return model_dict

    def sync_variables(self, sess):
        """Copy primary-network weights into the target network."""
        sess.run(self.sync_op)

    def train(self, sess, state_current, state_future, action, reward, end_game, rnn_state_in, batch_size, rnn_seq_len):
        """One gradient step on a batch of traces (target net bootstraps)."""
        sess.run(self.update, feed_dict={self.target_dict['state_in']: state_future,
                                         self.primary_dict['state_in']: state_current,
                                         self.actions: action,
                                         self.current_reward: reward,
                                         self.end_game: end_game,
                                         self.primary_dict['rnn_in'][0]: rnn_state_in[0],
                                         self.primary_dict['rnn_in'][1]: rnn_state_in[1],
                                         self.target_dict['rnn_in'][0]: rnn_state_in[0],
                                         self.target_dict['rnn_in'][1]: rnn_state_in[1],
                                         self.primary_dict['batch_size_in']: batch_size,
                                         self.target_dict['batch_size_in']: batch_size,
                                         self.trainLength: rnn_seq_len})

    def predict_act(self, sess, state, rnn_state_in, batch_size):
        """Greedy action for the given frames; also returns new LSTM state."""
        action, rnn_state_out = sess.run([self.predict, self.primary_dict['rnn_out']],
                                         feed_dict = {self.primary_dict['state_in']: state,
                                                      self.primary_dict['rnn_in'][0]: rnn_state_in[0],
                                                      self.primary_dict['rnn_in'][1]: rnn_state_in[1],
                                                      self.primary_dict['batch_size_in']: batch_size})
        return action, rnn_state_out

    def return_rnn_state(self, sess, state, rnn_state_in, batch_size):
        """Advance only the LSTM state (used when acting randomly)."""
        rnn_state_out = sess.run(self.primary_dict['rnn_out'],
                                 feed_dict = {self.primary_dict['state_in']: state,
                                              self.primary_dict['rnn_in'][0]: rnn_state_in[0],
                                              self.primary_dict['rnn_in'][1]: rnn_state_in[1],
                                              self.primary_dict['batch_size_in']: batch_size})
        return rnn_state_out
<file_sep>import tensorflow as tf
from params import Params
class DPNet():
    """Deep Policy Network (REINFORCE-style) with entropy regularization."""

    def __init__(self, action_space):
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.IMG_Z = Params['IMG_Z']
        self.entropy_penalty = Params['ENTROPY_PENALTY']
        self.action_space = action_space
        self.learning_rate = Params['LEARNING_RATE']
        self.reward_discount = 0.99
        self.policy_nn()

    def policy_nn(self):
        """Build the network plus the policy-gradient loss and update op."""
        self.state_in, self.policy_out = self.build_nn()
        self.predict_action = tf.argmax(self.policy_out, axis = 1)
        self.exp_reward = tf.placeholder(tf.float32, shape=[None])   # discounted returns
        self.actions = tf.placeholder(tf.int32, shape=[None])
        # Standardize returns across the batch.
        # NOTE(review): divides by sqrt(variance) with no epsilon -- a
        # zero-variance batch would produce NaNs; confirm inputs always vary.
        r_mean, r_var = tf.nn.moments(self.exp_reward, axes = [0])
        normalized_reward = (self.exp_reward - r_mean)/tf.sqrt(r_var)
        #normalized_reward = self.exp_reward
        actions_onehot = tf.one_hot(self.actions, self.action_space, dtype=tf.float32)
        # REINFORCE loss: -log pi(a|s) * normalized return
        neg_log_prob = -tf.multiply( actions_onehot * tf.log(self.policy_out + 1e-6), tf.reshape(normalized_reward, [-1, 1]))
        entropy = self.policy_out * tf.log(self.policy_out + 1e-6)
        loss = tf.reduce_mean(neg_log_prob) + self.entropy_penalty * tf.reduce_mean(entropy)
        self.update = tf.train.AdamOptimizer(learning_rate = self.learning_rate).minimize(loss)

    def build_nn(self):
        """Build conv -> conv -> fc -> softmax policy head.

        Returns (state placeholder, policy output tensor).
        """
        # [batch, in_height, in_width, in_channels]
        state_in = tf.placeholder(tf.float32, shape=[None, self.IMG_X, self.IMG_Y, self.IMG_Z])
        state_resized = tf.image.resize_images(state_in, [80, 80])
        ##########################################################
        # conv layer 1: 8x8, 32 maps, stride 4 -> 20x20x32
        # (per-position bias, [1,20,20,32])
        conv1_W = tf.Variable(tf.truncated_normal([8, 8, self.IMG_Z, 32], stddev = 0.01))
        conv1_b = tf.Variable(tf.truncated_normal([1, 20, 20, 32], stddev = 0.01))
        conv1_strides = [1, 4, 4, 1]
        conv1_out = tf.nn.conv2d(state_resized, conv1_W, conv1_strides,
                                 padding = 'SAME') + conv1_b
        conv1_out = tf.nn.relu(conv1_out)
        ###########################################################
        # conv layer 2: 4x4, 64 maps, stride 2, VALID -> 9x9x64
        conv2_W = tf.Variable(tf.truncated_normal([4, 4, 32, 64], stddev = 0.01))
        conv2_b = tf.Variable(tf.truncated_normal([1, 9, 9, 64], stddev = 0.01))
        conv2_strides = [1, 2, 2, 1]
        conv2_out = tf.nn.conv2d(conv1_out, conv2_W, conv2_strides,
                                 padding = 'VALID') + conv2_b
        conv2_out = tf.nn.relu(conv2_out)
        ###########################################################
        # fully connected layer: 9*9*64 = 5184 inputs -> 256
        ff1_input = tf.reshape(conv2_out, [-1, 5184])
        ff1_W = tf.Variable(tf.truncated_normal([5184, 256], stddev = 0.01))
        ff1_b = tf.Variable(tf.truncated_normal([1, 256], stddev = 0.01))
        ff1_out = tf.matmul(ff1_input, ff1_W) + ff1_b
        ff1_out = tf.nn.relu(ff1_out)
        ##################################################################
        # output layer: 256 -> action_space, softmax policy
        ff2_W = tf.Variable(tf.truncated_normal([ 256, self.action_space],
                                                stddev = 0.01))
        ff2_b = tf.Variable(tf.truncated_normal([ 1, self.action_space],
                                                stddev = 0.01))
        ff2_out = tf.matmul(ff1_out, ff2_W) + ff2_b
        policy_out = tf.nn.softmax(ff2_out)
        return state_in, policy_out

    def train(self, sess, state, action, reward):
        """One policy-gradient step over a batch of (state, action, return)."""
        sess.run(self.update, feed_dict={self.state_in: state,
                                         self.actions: action,
                                         self.exp_reward: reward})

    def predict_policy(self, sess, state):
        """Return action probabilities for the given states."""
        policy = sess.run(self.policy_out,
                          feed_dict = {self.state_in: state})
        return policy
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 26 09:56:10 2017
Modified on April 28 08:34:05 2017
@author: shengx,siva
"""
#%%
import tensorflow as tf
import numpy as np
import gym
DEBUG = True
RENDER = False  # if displaying game graphics real time

LEARNING_RATE = 0.001  # Adam step size for the policy network
NUM_ACTIONS = 2  # one for up and one for down
IMG_X, IMG_Y = 80, 80  # downsampled frame dimensions fed to the network
#%%################################################################
# Simple Policy Network Class
class PNet():
    """Single-hidden-layer policy network for Pong (flattened 80x80 input)."""

    def __init__(self,input_size = (IMG_X * IMG_Y, 1), action_space = NUM_ACTIONS):
        self.input_size, self.input_frame= input_size
        self.action_space = action_space

    def build_nn(self):
        """Build the graph: input -> 512 relu -> softmax policy, plus the
        REINFORCE loss (log-prob weighted by advantage) and Adam update."""
        # flattened frame difference, shape [batch, 6400]
        self.input = tf.placeholder(tf.float32, shape = [None, self.input_size])
        self.W1 = tf.Variable(tf.truncated_normal([self.input_size, 512], stddev = 0.1))
        self.b1 = tf.Variable(tf.truncated_normal([1, 512], stddev = 0.1))
        self.hidden = tf.nn.relu(tf.matmul(self.input, self.W1) + self.b1)
        self.W2 = tf.Variable(tf.truncated_normal([512, self.action_space], stddev = 0.1))
        self.b2 = tf.Variable(tf.truncated_normal([1, self.action_space], stddev = 0.1))
        self.output = tf.nn.softmax(tf.matmul(self.hidden, self.W2) + self.b2)
        self.predict_action = tf.argmax(self.output, axis = 1)
        # discounted, normalized returns
        self.advantage = tf.placeholder(tf.float32, shape=[None])
        self.actions = tf.placeholder(tf.int32, shape=[None])
        self.actions_onehot = tf.one_hot(self.actions, self.action_space, dtype=tf.float32)
        self.neg_log_prob = tf.multiply(-self.actions_onehot * tf.log(self.output), tf.reshape(self.advantage, [-1, 1]))
        self.loss = tf.reduce_mean(self.neg_log_prob)
        self.update = tf.train.AdamOptimizer(learning_rate = LEARNING_RATE).minimize(self.loss)
#%%################################################################
# utility functions
def process_frame(frame):
    """Preprocess one raw Atari frame into a binary 80x80 image.

    Crops rows 34:194 (the playing field), downsamples by 2 in both
    directions, averages the three colour channels, and thresholds at
    100 so that foreground pixels become True and background False.
    """
    cropped = frame[34: 194 : 2, 0: 160 : 2, :]
    grayscale = np.mean(cropped, axis = 2, dtype = 'float32')
    return grayscale > 100
def discount_rewards(r, future_reward_discount=0.99):
    """Compute discounted returns over a reward sequence.

    Pong-specific: a non-zero reward marks the end of a rally, so the
    running return is reset there instead of propagating across rallies.

    Args:
        r: 1-D sequence of per-step rewards.
        future_reward_discount: discount factor gamma. Defaults to 0.99,
            matching the module-level constant this function previously
            read implicitly from global scope.

    Returns:
        numpy array of the same length holding the discounted returns.
    """
    discounted_r = np.zeros((len(r)))
    running_add = 0
    for t in reversed(range(0, len(r))):
        # reset the running sum at a rally boundary (non-zero reward)
        if r[t] != 0: running_add = 0
        running_add = running_add * future_reward_discount + r[t]
        discounted_r[t] = running_add
    return discounted_r
#%%################################################################
# Training configuration
max_episode = 21
max_frame = 10000          # safety cap on steps per episode
frame_skip = 2             # repeat each chosen action this many frames
running_reward = None      # exponential moving average of episode reward
future_reward_discount = 0.99
# starting tensorflow and game environment
env = gym.make("Pong-v0")
Atari_AI = PNet()
Atari_AI.build_nn()

init_op = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init_op)
save_path = "/home/siva/Documents/CheckpointData_policy/"
saver = tf.train.Saver()
# Resume from the latest checkpoint if one exists.
try:
    ckpt = tf.train.get_checkpoint_state(save_path)
    load_path = ckpt.model_checkpoint_path
    saver.restore(sess, load_path)
    print("Session restored...")
except:
    print("Nothing to restore...")
# Training
i_episode = 0
state = np.zeros((80, 80, 2),dtype = 'float32')          # two consecutive frames
state_sequence = np.zeros((max_frame, 6400)).astype('float32')
action_sequence = np.zeros((max_frame)).astype('int32')
reward_sequence = np.zeros((max_frame)).astype('float32')
reward_log = []
while True:
    i_episode += 1
    state[:,:,0] = process_frame(env.reset())
    reward_sum = 0
    for t in range(max_frame):
        if RENDER:
            env.render()
        # select an action based on policy network output probability;
        # the network sees the difference of the two latest frames
        diff_frame = np.reshape(state[:,:,1] - state[:,:,0], (1, 6400))
        nn_prob = sess.run(Atari_AI.output, feed_dict={
            Atari_AI.input: diff_frame})
        action = np.random.choice(2, p=np.squeeze(nn_prob))
        # execute the action for a few steps (gym actions 1/2 = up/down)
        for _ in range(frame_skip):
            observation, reward, done, info = env.step(action + 1)
            reward_sum += reward
            if done:
                break
        # update the new state and reward and memory buffer
        # NOTE(review): only the LAST skipped frame's reward is recorded
        # in reward_sequence while reward_sum accumulates all -- confirm
        # this is intended.
        state[:,:,0] = state[:,:,1]
        state[:,:,1] = process_frame(observation)
        state_sequence[t, :] = diff_frame
        action_sequence[t] = action
        reward_sequence[t] = reward
        # end of episode: one REINFORCE update over the whole episode
        if done:
            decay_reward = discount_rewards(reward_sequence[0:t+1])
            # normalize the reward
            decay_reward -= np.mean(decay_reward)
            decay_reward /= np.std(decay_reward)
            sess.run(Atari_AI.update, feed_dict={
                Atari_AI.input: state_sequence[0:t+1, :] ,
                Atari_AI.actions: action_sequence[0:t+1],
                Atari_AI.advantage: decay_reward})
            # reset sequence memory
            state_sequence = np.zeros((max_frame, 6400))
            action_sequence = np.zeros((max_frame))
            reward_sequence = np.zeros((max_frame))
            # calculate and display the moving average of rewards
            running_reward = reward_sum if running_reward is None else running_reward * 0.99 + reward_sum * 0.01
            if i_episode % 10 == 0:
                print('ep {}: reward: {}, mean reward: {:3f}'.format(i_episode, reward_sum, running_reward))
                reward_log.append(running_reward)
            else:
                print('\tep {}: reward: {}'.format(i_episode, reward_sum))
            # checkpoint every 100 episodes
            if i_episode % 100 == 0:
                saver.save(sess, save_path+'model-'+str(i_episode)+'.cptk')
                print("SAVED MODEL #{}".format(i_episode))
            break
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 26 09:56:10 2017
qlearner1h.py
An AI agent that plays Atari games using Reinforcement Learning
with the OpenAI framework. The neural network architecture consists
of 1 hidden layer with 512 nodes.
@author: shengx, gaconte
"""
#%%
import tensorflow as tf
import numpy as np
import random
import gym
import pickle
DEBUG = True
RENDER = False  # if displaying game graphics real time

IMG_X, IMG_Y = 80, 80   # downsampled frame dimensions

#ENV_NAME = "Pong-v0"
#ENV_NAME = "Qbert-v0"
#ENV_NAME = "SpaceInvaders-v0"
ENV_NAME = "Bowling-v0"

ACTION_SPACE = 6  # possible action = 1, 2, 3; still, up, down
TRAIN_EPISODES = 3000
TEST_EPISODES = 1

LEARNING_RATE = 0.001          # Adam step size
max_episode = 21
max_frame = 10000              # step cap per episode
batch_size = 32                # replay minibatch size
running_reward = None          # moving average of episode reward
future_reward_discount = 0.99  # gamma
random_action_prob = 0.9       # initial epsilon (annealed to 0.1)
rand_prob_step = (0.9 - 0.1)/60000   # epsilon decay per update
buffer_size = 60000            # replay memory capacity
frame_skip = 2                 # frames per chosen action
sync_freq = 2000               # steps between primary -> target syncs
update_freq = 4                # env steps per gradient update
save_freq = 200                # episodes between checkpoints

save_path = "./" + ENV_NAME + "q1h/"
#%% Deep Q-Network Structure
class DQNet():
    """Single-hidden-layer Q-network (512 units) over flattened 80x80 frames."""

    def __init__(self,input_size = (80, 80, 1), action_space = ACTION_SPACE):
        self.input_x, self.input_y, self.input_frame= input_size
        self.action_space = action_space

    def build_nn(self):
        """Build the Q-value graph, greedy prediction, loss and Adam update."""
        # [batch, in_height, in_width, in_channels]
        self.input = tf.placeholder(tf.float32, shape=[None, self.input_x, self.input_y, self.input_frame])
        self.W1 = tf.Variable(tf.truncated_normal([6400, 512], stddev = 0.1))
        self.b1 = tf.Variable(tf.truncated_normal([1, 512], stddev = 0.1))
        self.hidden1 = tf.nn.relu(tf.matmul(tf.reshape(self.input,[-1, 6400]), self.W1) + self.b1)
        self.W2 = tf.Variable(tf.truncated_normal([512, ACTION_SPACE], stddev = 0.1))
        self.b2 = tf.Variable(tf.truncated_normal([1, ACTION_SPACE], stddev = 0.1))
        self.output = tf.matmul(self.hidden1, self.W2) + self.b2     # Q-values per action
        ###########################################################
        # prediction, loss, and update
        self.predict = tf.argmax(self.output, 1)                     # greedy action
        self.targetQ = tf.placeholder(shape=[None],dtype=tf.float32) # Bellman targets
        self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
        self.actions_onehot = tf.one_hot(self.actions, self.action_space, dtype=tf.float32)
        self.Q = tf.reduce_sum((self.output * self.actions_onehot),
                               reduction_indices=1)
        self.loss = tf.reduce_mean(tf.square(self.targetQ - self.Q))
        self.update = tf.train.AdamOptimizer(learning_rate = LEARNING_RATE).minimize(self.loss)

    def variable_list(self):
        """Trainable variables in creation order (for primary/target sync)."""
        return [self.W1, self.b1, self.W2, self.b2]
#%% utility functions
class replayMemory():
def __init__(self, size):
# [i, :, :, 0:4] is the current state
# [i, :, :, 1:5] is the next state
self.frames = np.zeros((size, IMG_X, IMG_Y, 2), dtype = 'float32')
self.actions = np.zeros((size), dtype = 'int32')
self.rewards = np.zeros((size), dtype = 'float32')
self.done = np.zeros((size), dtype = 'int32')
self.__counter = 0
self.__size = size
def add(self, state, action, reward, done):
self.frames[self.__counter, :, :, : ] = state
self.actions[self.__counter] = action
self.rewards[self.__counter] = reward
self.done[self.__counter] = done
self.__counter += 1
self.__counter = self.__counter % self.__size
def makeBatch(self, idx):
return (self.frames[idx, :, :, 0], self.frames[idx, :, :, 1], self.actions[idx], self.rewards[idx], self.done[idx])
def process_frame(frame):
    """Crop the playing field, 2x-downsample to 80x80, average the colour
    channels, and threshold at 100 to produce a binary foreground mask."""
    region = frame[34: 194 : 2, 0: 160 : 2, :]
    intensity = np.mean(region, axis = 2, dtype = 'float32')
    return intensity > 100
def copy_variables(from_nn, to_nn, sess):
    """Assign each variable's value from `from_nn` into the matching
    variable of `to_nn`, executing one assign op per pair via `sess`."""
    for src_var, dst_var in zip(from_nn, to_nn):
        sess.run(dst_var.assign(src_var.value()))
#%%
###################################################################
# pre-training: fill the replay memory buffer with random-play examples
memory_buffer = replayMemory(buffer_size)
buffer_counter = 0
state_input = np.zeros((IMG_X, IMG_Y, 2), dtype = 'float32')

env = gym.make(ENV_NAME)
while True:
    # reset the game environment, take a initial screen shot
    observation = env.reset()
    # the state of current game play, 0:2 is 3 previous frame,
    # 3 is the current frame, 4 is the frame after action
    state = np.zeros((IMG_X, IMG_Y, 5), dtype = 'float32')
    state[:,:,-1] = process_frame(observation)
    for t in range(buffer_size):
        # NOTE(review): only actions 0..2 are sampled although
        # ACTION_SPACE is 6 for Bowling -- confirm this restriction.
        action = random.randint(0, 2)
        # run the game with same action for a few frames
        for _ in range(frame_skip):
            observation, reward, done, info = env.step(action)
            if done:
                break
        # shift the frame stack and append the newest processed frame
        state = np.roll(state, -1, axis = 2)
        # effective area [34:194, 0:168] with 2*2 downsampling -> 160/2 * 130/2 matrix
        state[:,:,-1] = process_frame(observation)
        # store consecutive frame differences as the 2-channel transition
        state_input[:,:,0] = state[:,:,-2] - state[:,:,-3]
        state_input[:,:,1] = state[:,:,-1] - state[:,:,-2]
        memory_buffer.add(state_input, action, reward, done)
        buffer_counter += 1
        if done:
            print("Episode finished after {} timesteps".format(t+1))
            break
    # stop once the buffer has been filled at least once
    if buffer_counter > buffer_size:
        break
env.close()
#%%
###################################################################
# Initialize environment and the primary/target Q-networks
env = gym.make(ENV_NAME)

tf.reset_default_graph()
Atari_AI_primary = DQNet()
Atari_AI_primary.build_nn()

Atari_AI_target = DQNet()
Atari_AI_target.build_nn()

init_op = tf.global_variables_initializer()

reward_log = []

sess = tf.Session()
sess.run(init_op)

# Initialize saver; resume training from the latest checkpoint if present,
# otherwise start fresh with target weights copied from the primary.
saver = tf.train.Saver()
try:
    ckpt = tf.train.get_checkpoint_state(save_path)
    load_path = ckpt.model_checkpoint_path
    saver.restore(sess, load_path)
    f = open(save_path + 'reward_log.cptk','rb')
    reward_log = pickle.load(f)
    f.close()
    random_action_prob = 0.1   # epsilon already annealed in a prior run
    print("Session restored...")
except:
    primary_variables = Atari_AI_primary.variable_list()
    target_variables = Atari_AI_target.variable_list()
    copy_variables(primary_variables, target_variables, sess)
    print("Nothing to restore...")

# start training
i_episode = 0
updates = 0   # environment steps taken
steps = 0     # gradient updates performed
while i_episode < TRAIN_EPISODES:
    i_episode += 1
    observation = env.reset()
    state = np.zeros((IMG_X, IMG_Y, 5), dtype = 'float32')
    state[:,:,-1] = process_frame(observation)
    reward_sum = 0
    for t in range(max_frame):
        if RENDER:
            env.render()
        # epsilon-greedy: select an action based on the action-value function Q
        if np.random.random_sample() > random_action_prob:
            # use model to predict action from the latest frame difference
            action = sess.run(Atari_AI_primary.predict,
                              feed_dict = {Atari_AI_primary.input: np.reshape(state[:,:,-1] - state[:,:,-2], [1, 80, 80, 1])})[0]
        else:
            # random action
            # NOTE(review): samples only 0..2 although ACTION_SPACE is 6 --
            # confirm this restriction is intended for this game.
            action = random.randint(0, 2)
        # execute the action for a few steps
        for _ in range(frame_skip):
            observation, reward, done, info = env.step(action)
            reward_sum += reward
            if done:
                break
        # update the new state and reward and memory buffer
        state = np.roll(state, -1, axis = 2)
        state[:,:,-1] = process_frame(observation)
        state_input[:,:,0] = state[:,:,-2] - state[:,:,-3]
        state_input[:,:,1] = state[:,:,-1] - state[:,:,-2]
        memory_buffer.add(state_input, action, reward, done)
        updates += 1
        # one gradient update every update_freq environment steps
        if updates % update_freq == 0:
            if random_action_prob > 0.1:
                random_action_prob -= rand_prob_step   # anneal epsilon
            steps += 1
            # randomly sample minibatch from memory
            batch_sample_index = random.sample(range(buffer_size), batch_size)
            state_current, state_future, actions, current_rewards, end_game = memory_buffer.makeBatch(batch_sample_index)
            # bootstrap targets from the frozen target network
            future_rewards = sess.run(Atari_AI_target.output,
                                      feed_dict = {Atari_AI_target.input: np.expand_dims(state_future, axis = 3)})
            targetQ = current_rewards + future_reward_discount * (1 - end_game) * np.amax(future_rewards, axis = 1)
            # update the target-value function Q
            sess.run(Atari_AI_primary.update, feed_dict = {
                Atari_AI_primary.input: np.expand_dims(state_current, axis = 3),
                Atari_AI_primary.actions: actions,
                Atari_AI_primary.targetQ: targetQ})
            # every C steps reset Q' = Q (sync target network)
            if steps % sync_freq == 0:
                primary_variables = Atari_AI_primary.variable_list()
                target_variables = Atari_AI_target.variable_list()
                copy_variables(primary_variables, target_variables, sess)
        # end of episode: log the moving-average reward, checkpoint periodically
        if done:
            running_reward = reward_sum if running_reward is None else running_reward * 0.99 + reward_sum * 0.01
            if DEBUG:
                if i_episode % 10 == 0:
                    print('ep {}: updates {}: reward: {}, mean reward: {:3f}'.format(i_episode, updates, reward_sum, running_reward))
                else:
                    print('\tep {}: reward: {}'.format(i_episode, reward_sum))
            # saving results
            if i_episode % 10 == 0:
                reward_log.append(running_reward)
            if i_episode % save_freq == 0:
                saver.save(sess, save_path+'model-'+str(i_episode)+'.cptk')
                f = open(save_path + 'reward_log.cptk','wb')
                pickle.dump(reward_log, f)
                f.close()
            break
#%% testing
###################################################################
# Evaluate the trained network for TEST_EPISODES greedy episodes.
env = gym.make(ENV_NAME)

tf.reset_default_graph()
Atari_AI_primary = DQNet()
Atari_AI_primary.build_nn()

Atari_AI_target = DQNet()
Atari_AI_target.build_nn()

init_op = tf.global_variables_initializer()

sess = tf.Session()
sess.run(init_op)

# Initialize saver; restore the trained weights if a checkpoint exists.
saver = tf.train.Saver()
try:
    ckpt = tf.train.get_checkpoint_state(save_path)
    load_path = ckpt.model_checkpoint_path
    saver.restore(sess, load_path)
    f = open(save_path + 'reward_log.cptk','rb')
    reward_log = pickle.load(f)
    f.close()
    print("Session restored...")
except:
    primary_variables = Atari_AI_primary.variable_list()
    target_variables = Atari_AI_target.variable_list()
    copy_variables(primary_variables, target_variables, sess)
    print("Nothing to restore...")

# start testing (greedy policy, no exploration)
i_episode = 0
total_reward_sum = 0
while i_episode < TEST_EPISODES:
    i_episode += 1
    observation = env.reset()
    state = np.zeros((IMG_X, IMG_Y, 5), dtype = 'float32')
    state[:,:,-1] = process_frame(observation)
    reward_sum = 0
    for t in range(max_frame):
        # select an action based on the action-value function Q
        # NOTE(review): `state` is never updated inside this loop, so the
        # network always sees the initial frames and the same action is
        # repeated for the whole episode -- likely a bug; confirm against
        # the training loop, which rolls the frame stack every step.
        action = sess.run(Atari_AI_primary.predict,
                          feed_dict = {Atari_AI_primary.input: np.reshape(state[:,:,-1] - state[:,:,-2], [1, 80, 80, 1])})[0]
        # execute the action for a few steps
        for _ in range(frame_skip):
            observation, reward, done, info = env.step(action)
            #env.render()
            reward_sum += reward
            if done:
                break
        # end of episode: accumulate the score
        if done:
            total_reward_sum += reward_sum
            print('\tep {}: reward: {} total_reward: {} action: {}'.format(i_episode, reward_sum, total_reward_sum, action))
            break

average_reward = total_reward_sum/TEST_EPISODES
print('\taverage_reward: ' + str(average_reward))
<file_sep>import tensorflow as tf
import numpy as np
import gym
from a3c import A3CNet
from logger import Logger
from params import Params
class Agent():
    """A3C worker: owns one gym environment and one local copy of the
    actor-critic network, and periodically pushes gradients to the shared
    global network (scope `global_scope`).
    """

    def __init__(self, id, scope_name, sess, logger, optimizer, global_scope = None):
        # Thread bookkeeping and shared TF plumbing.
        self.__thread_id = id
        self.__scope_name = scope_name
        self.__sess = sess
        self.__opt = optimizer
        self.__logger = logger
        # Cooperative shutdown flag; set from the launcher thread.
        self.exit = False

        self.env = gym.make(Params['GAME'])

        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.IMG_Z = Params['IMG_Z']
        self.frame_skip = Params['FRAME_SKIP']
        self.reward_discount = Params['REWARD_DISCOUNT']
        self.update_freq = Params['UPDATE_FREQ']

        self.action_space = self.env.action_space.n
        self.reward_sum = 0
        # Rolling frame stack; channel -1 holds the newest frame and
        # channels 1:IMG_Z+1 form the network input window.
        self.state = np.zeros((self.IMG_X, self.IMG_Y, self.IMG_Z + 1), dtype = 'float32')

        self.local_nn = A3CNet(self.__scope_name, self.action_space, self.__sess, self.__opt, global_scope)

    def run(self):
        """Worker loop: play, accumulate n-step returns, update the global net."""
        # initialize environment
        self.reset_game()

        frame_sequence = np.zeros((self.update_freq, self.IMG_X, self.IMG_Y, self.IMG_Z), dtype = 'float32')
        action_sequence = np.zeros((self.update_freq), dtype = 'float32')
        reward_sequence = np.zeros((self.update_freq), dtype = 'float32')
        done_sequence = np.zeros((self.update_freq), dtype = 'float32')
        R_sequence = np.zeros((self.update_freq + 1), dtype = 'float32')

        #while True:
        while not self.exit:
            # running game: collect up to update_freq transitions
            for t in range(self.update_freq):
                # take actions
                observation, reward, action, done = self.take_action(self.state[:,:,1:5])
                self.reward_sum += reward

                # record game progress
                frame_sequence[t, :, :, :] = self.state[:,:,1:5]
                action_sequence[t] = action
                reward_sequence[t] = reward
                done_sequence[t] = done

                # update next game state
                self.state = np.roll(self.state, -1, axis = 2)
                self.state[:,:,-1] = self.process_frame(observation)
                if done:
                    break

            # Bootstrap the return from the critic unless the episode ended.
            R_sequence[t+1] = 0 if done else self.get_value(self.state[:,:,1:5])
            '''
            R_sequence[t+1] = np.clip(R_sequence[t+1], -1, 1)
            for idx in range(t, -1, -1):
                if reward_sequence[idx] != 0:
                    R_sequence[idx] = reward_sequence[idx]
                else:
                    R_sequence[idx] = self.reward_discount * R_sequence[idx+1]
            '''
            # Discounted n-step returns, computed backwards from the bootstrap.
            reward_sequence = np.clip(reward_sequence, -1, 1)
            for idx in range(t, -1, -1):
                R_sequence[idx] = reward_sequence[idx] + self.reward_discount * R_sequence[idx+1]

            self.update_nn(frame_sequence[0:t+1, :, :, :], action_sequence[0:t+1], R_sequence[0:t+1])

            if done:
                self.__logger.log(self.__thread_id, self.reward_sum)
                self.reset_game()

    def take_action(self, current_state):
        """Sample an action from the policy and repeat it `frame_skip` times.

        Returns (observation, summed reward, action, done).
        """
        policy_prob = self.local_nn.predict_policy(np.expand_dims(current_state, axis = 0))
        # choose an action according to policy
        action = np.random.choice(self.action_space, p=np.squeeze(policy_prob))
        # take this action for certain steps and record the reward
        reward = 0
        for _ in range(self.frame_skip):
            observation, reward_temp, done, info = self.env.step(action)
            reward += reward_temp
            if done:
                break
        return observation, reward, action, done

    def get_value(self, state):
        # Critic's value estimate, used to bootstrap the return.
        return self.local_nn.predict_value(np.expand_dims(state, axis = 0))

    def update_nn(self, states, actions, rewards):
        # Push gradients to the global network, then pull fresh weights.
        self.local_nn.update_global(states, actions, rewards)
        self.local_nn.sync_variables()

    def test(self):
        # Evaluation mode not implemented for workers.
        pass

    def process_frame(self, frame):
        # Grayscale + 2x2 downsample, scaled to [-1, 1).
        # output shape 105X80 (for a 210x160 Atari frame — confirm)
        return np.mean(frame[::2,::2], axis = 2, dtype = 'float32') / 128 - 1

    def reset_game(self):
        # New episode: clear the frame stack and seed the newest channel.
        observation = self.env.reset()
        self.state.fill(0)
        self.state[:,:,-1] = self.process_frame(observation)
        self.reward_sum = 0
<file_sep># PacmanDQN
Deep Reinforcement Learning in Pac-man
## Example usage
Run a model on `smallGrid` layout for 10000 episodes, of which 9000 episodes
are used for training.
```
$ python3 pacman.py -p PacmanDQN -n 10000 -x 9000 -l smallGrid
```
OR
You can directly run qrsh jobs in format jobs_*.qrsh
### Layouts
Different layouts can be found and created in the `layouts` directory
## Requirements
- `python==3.5.1`
- `tensorflow==0.8rc`
## Acknowledgements
DQN framework (made for the ATARI / Arcade Learning Environment) by:
* [deepQN_tensorflow](https://github.com/mrkulk/deepQN_tensorflow) ([https://github.com/mrkulk/deepQN_tensorflow](https://github.com/mrkulk/deepQN_tensorflow))
Pac-man implementation by UC Berkeley:
* [The Pac-man Projects - UC Berkeley](http://ai.berkeley.edu/project_overview.html) ([http://ai.berkeley.edu/project_overview.html](http://ai.berkeley.edu/project_overview.html))
Deep Reinforcement Learning in pac-man
* [Deep RL pacman](https://github.com/tychovdo/PacmanDQN)
<file_sep>Final project for BU EC500 K1/CS591 S2 Deep Learning
This project implements neural networks to play atari games using Reinforcement Learning.
The following approaches to reinforcement learning are explored:
1. Deep Q Network
2. Deep Policy Network
3. Asynchronous Actor-Critic Network
Two games are played:
1. Pong
2. Pacman
The environments used in this project are:
1. OpenAI gym
2. Berkeley Pacman Framework
<file_sep>
from agent import Agent
from logger import Logger
from params import Params
import tensorflow as tf
import threading
import time
def main():
    """Build the shared A3C graph and launch one training thread per worker.

    Returns:
        (workers, threads): the local Agent instances and the threads
        running them, so the caller can signal shutdown and join.
    """
    tf.reset_default_graph()
    graph = tf.Graph()
    session = tf.Session(graph=graph)
    log = Logger()
    # Shared RMSProp optimizer; use_locking guards concurrent updates
    # coming from the worker threads.
    shared_optimizer = tf.train.RMSPropOptimizer(
        learning_rate=Params['LEARNING_RATE'],
        decay=0.99,
        momentum=0,
        epsilon=0.1,
        use_locking=True)

    with graph.as_default():
        # The global network holds the master copy of the parameters.
        master_agent = Agent(-1, 'global', session, log, shared_optimizer)
        # One local (worker) network per thread, all synced to 'global'.
        workers = [
            Agent(worker_id, 'local' + str(worker_id), session, log,
                  shared_optimizer, 'global')
            for worker_id in range(Params['THREAD_NUM'])
        ]

        # initialize tensorflow
        init_op = tf.global_variables_initializer()
        saver = tf.train.Saver()
        session.run(init_op)
        log.add_saver(session, saver)
        log.restore()
        # Freeze the graph so no thread can accidentally add nodes.
        graph.finalize()

    threads = []
    for worker in workers:
        worker_thread = threading.Thread(target=worker.run, args=())
        worker_thread.start()
        # Stagger worker start-up by one second.
        time.sleep(1)
        threads.append(worker_thread)

    return workers, threads
if __name__ == "__main__":
    local_agent, training_thread = main()
##%%
# Manual shutdown snippet (for an interactive session): signal each
# worker to exit, then give it a moment to finish its loop.
#for (agent, agent_thread) in zip(local_agent, training_thread):
#    agent.exit = True
#    time.sleep(0.5)
# agent_thread.join(1)<file_sep>import numpy as np
class replayMemory():
    """Episode-level replay buffer for the recurrent (LSTM) DQN.

    Whole episodes are stored in a ring buffer; makeBatch() samples
    fixed-length temporal sub-sequences from randomly chosen episodes.
    """

    def __init__(self, IMG_X, IMG_Y, size):
        # [i, :, :, 0:4] is the current state
        # [i, :, :, 1:5] is the next state
        self.IMG_X = IMG_X
        self.IMG_Y = IMG_Y
        self.frames = [None] * size
        self.actions = [None] * size
        self.rewards = [None] * size
        self.done = [None] * size
        self.__counter = 0
        self.__size = size

    def add(self, state, action, reward, done):
        """Store one full episode, overwriting the oldest slot when full."""
        slot = self.__counter
        self.frames[slot] = np.array(state)
        self.actions[slot] = np.array(action)
        self.rewards[slot] = np.array(reward)
        self.done[slot] = np.array(done)
        self.__counter = (slot + 1) % self.__size

    def makeBatch(self, batch_size, temporal_length):
        """Sample `batch_size` sub-sequences of `temporal_length` steps.

        Returns (current_frames, next_frames, actions, rewards, done),
        each flattened to batch_size * temporal_length on the first axis.
        """
        episode_ids = np.random.randint(0, self.__size, (batch_size))

        cur = np.zeros((batch_size, temporal_length, self.IMG_X, self.IMG_Y, 1)).astype('float32')
        nxt = np.zeros((batch_size, temporal_length, self.IMG_X, self.IMG_Y, 1)).astype('float32')
        acts = np.zeros((batch_size, temporal_length)).astype('int32')
        rews = np.zeros((batch_size, temporal_length)).astype('float32')
        dones = np.zeros((batch_size, temporal_length)).astype('int32')

        for row, episode in enumerate(episode_ids):
            # Random starting step; the +1 margin keeps `steps + 1` in range.
            start = np.random.randint(0, len(self.actions[episode]) - temporal_length - 1)
            steps = np.array(range(start, start + temporal_length))
            cur[row, :, :, :, :] = self.frames[episode][steps, :, :, :]
            nxt[row, :, :, :, :] = self.frames[episode][steps + 1, :, :, :]
            acts[row, :] = self.actions[episode][steps]
            rews[row, :] = self.rewards[episode][steps]
            dones[row, :] = self.done[episode][steps]

        flat_frames = (batch_size * temporal_length, self.IMG_X, self.IMG_Y, 1)
        return (np.reshape(cur, flat_frames),
                np.reshape(nxt, flat_frames),
                np.reshape(acts, (-1)),
                np.reshape(rews, (-1)),
                np.reshape(dones, (-1)))
import numpy as np
import random
import util
import time
import sys
from pacman import Directions
from game import Agent
import game
from collections import deque
import tensorflow as tf
from DQN import *
# Default hyper-parameters for the PacmanDQN agent; width/height/history
# are filled in from command-line arguments in PacmanDQN.__init__.
params = {
    # Model backups
    'load_file': None,#'./models/PacmanDQN_capsuleClassic_h8_ep100000',
    'save_file': None,
    'save_interval' : 5000,     # episodes between checkpoint saves

    'history':1,                # number of past frames stacked in the state

    'train_start': 5000,        # steps of experience collected before training starts
    'batch_size': 32,           # Replay memory batch size
    'mem_size': 100000,         # Replay memory size

    'discount': 0.95,           # Discount rate (gamma value)
    'lr': .0002,                # Learning rate
    'rms_decay': 0.99,          # RMS Prop decay
    'rms_eps': 1e-6,            # RMS Prop epsilon

    # Epsilon value (epsilon-greedy)
    'eps': 1.0,                 # Epsilon start value
    'eps_final': 0.1,           # Epsilon end value
    'eps_step': 10000           # Epsilon steps between start and end (linear)
}
class PacmanDQN(game.Agent):
    """Deep Q-Network agent for the Berkeley Pac-man framework.

    Plays epsilon-greedily from a convolutional Q-network (see DQN.py),
    stores (s, r, a, s', terminal) transitions in a replay memory and
    trains on random minibatches sampled from it.
    """

    def __init__(self, args):
        print("Initialise DQN Agent")

        # Load parameters from user-given arguments
        self.params = params
        self.params['width'] = args['width']
        self.params['height'] = args['height']
        self.params['num_training'] = args['numTraining']
        self.params['history'] = args['numHistory']
        # Checkpoint name derived from the command line (agent + layout).
        self.params['save_file'] = sys.argv[2]+'_'+ sys.argv[8]+'_h'+str(self.params['history'])+'_x2'
        print ("HistoryLen: ",self.params['history'])
        print ("FN: ",self.params['save_file'])

        # Start Tensorflow session
        n = 2 #number of nodes requested
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=1.0/n)
        self.sess = tf.Session(config = tf.ConfigProto(gpu_options = gpu_options, intra_op_parallelism_threads=n-1))
        self.qnet = DQN(self.params)

        # Q and cost
        self.Q_global = []

        # Stats
        self.cnt = self.qnet.sess.run(self.qnet.global_step)
        self.local_cnt = 0
        self.numeps = 0
        self.last_score = 0
        self.last_reward = 0.
        self.current_state = np.zeros((self.params['height'],self.params['width'],self.params['history']))
        self.replay_mem = deque()  # replay memory
        self.last_scores = deque()

    def getMove(self, state):
        """Pick a move epsilon-greedily from the Q-network."""
        if np.random.rand() > self.params['eps']:
            # Greedy: evaluate Q(s, .) for the current stacked state.
            self.Q_pred = self.qnet.sess.run(
                self.qnet.y,
                feed_dict = {self.qnet.x: np.reshape(self.current_state,
                             (1, self.params['height'], self.params['width'], self.params['history'])),
                             self.qnet.q_t: np.zeros(1),
                             self.qnet.actions: np.zeros((1, 4)),
                             self.qnet.terminals: np.zeros(1),
                             self.qnet.rewards: np.zeros(1)})[0]
            self.Q_global.append(max(self.Q_pred))  # track max Q for stats
            a_winner = np.argwhere(self.Q_pred == np.amax(self.Q_pred))
            # Break ties between equally good actions at random.
            if len(a_winner) > 1:
                move = self.get_direction(a_winner[np.random.randint(0, len(a_winner))][0])
            else:
                move = self.get_direction(a_winner[0][0])
        else:
            # Explore: uniform random action.
            move = self.get_direction(np.random.randint(0, 4))

        # Save last_action
        self.last_action = self.get_value(move)
        return move

    # mapping direction -> action index
    def get_value(self, direction):
        if direction == Directions.NORTH:
            return 0.
        elif direction == Directions.EAST:
            return 1.
        elif direction == Directions.SOUTH:
            return 2.
        else:
            return 3.

    # mapping action index -> direction
    def get_direction(self, value):
        if value == 0.:
            return Directions.NORTH
        elif value == 1.:
            return Directions.EAST
        elif value == 2.:
            return Directions.SOUTH
        else:
            return Directions.WEST

    def update_experience(self, state):
        """Record the transition produced by the last action and train."""
        if self.last_action is not None:
            # Update each step
            self.last_state = np.copy(self.current_state)
            self.current_state = self.getState(state)

            # Update rewards (score delta since the previous step)
            self.last_reward = state.data.score - self.last_score
            self.last_score = state.data.score

            # Make experience
            experience = (self.last_state, float(self.last_reward), self.last_action, self.current_state, self.terminal)
            # Add experience; drop the oldest one when the memory is full.
            self.replay_mem.append(experience)
            if len(self.replay_mem) > self.params['mem_size']:
                self.replay_mem.popleft()

            # Start training...
            self.train()

        # Next
        self.local_cnt += 1
        self.frame += 1
        # Linear epsilon decay driven by the global training step.
        self.params['eps'] = max(self.params['eps_final'],
                                 1.00 - float(self.cnt)/ float(self.params['eps_step']))

    # Pacman comes here during its final round..
    def final(self, state):
        self.terminal = True
        self.update_experience(state)
        self.won = state.isWin()

        # Stats out
        sys.stdout.write("# %4d, l_r: %12f " %
                         (self.numeps, self.last_score))
        sys.stdout.write("| Q: %10f | won: %r \n" % ((max(self.Q_global, default=float('nan')), self.won)))
        sys.stdout.flush()

        # Save model
        if(params['save_file']):
            if self.local_cnt > self.params['train_start'] and self.numeps % self.params['save_interval'] == 0:
                self.qnet.save('./models/'+params['save_file'] + '_ep' + str(self.numeps))
                print('Model saved')

    def train(self):
        """Run one minibatch update once enough experience has been collected."""
        if (self.local_cnt > self.params['train_start']):
            # Get random batch of experiences from replay memory
            batch = random.sample(self.replay_mem, self.params['batch_size'])
            states = []     # States (s)
            rewards = []    # Rewards (r)
            actions = []    # Actions (a)
            nstates = []    # Next states (s')
            terminals = []  # Terminal state (t)

            for i in batch:
                states.append(i[0])
                rewards.append(i[1])
                actions.append(i[2])
                nstates.append(i[3])
                terminals.append(i[4])
            states = np.array(states)
            rewards = np.array(rewards)
            actions = self.get_onehot(np.array(actions))
            nstates = np.array(nstates)
            terminals = np.array(terminals)

            # Pass onto the learner...
            self.cnt, self.cost_disp = self.qnet.train(states, rewards, actions, nstates, terminals)

    def get_onehot(self, actions):
        """Convert a vector of action indices into one-hot rows."""
        actions_onehot = np.zeros((self.params['batch_size'], 4))
        for i in range(len(actions)):
            actions_onehot[i][int(actions[i])] = 1
        return actions_onehot

    # Get grayscaled agents in 2D matrix form
    def getState(self, state):
        """Encode the game state as a (height, width, history) grayscale
        observation; the newest frame sits in the last channel."""
        width, height = self.params['width'], self.params['height']
        # BUG FIX: the original reinterpreted the float64 buffer with
        # `matrix.dtype = int`, which made every fractional grayscale
        # write below truncate to 0. Use a float matrix instead.
        matrix = np.zeros((height, width), dtype='float32')

        # Grayscale value of each agent/object.
        wall = 0.125
        ghost = 0.250
        food = 0.375
        capsule = 0.500
        scaredg = 0.625
        pac = 0.998

        # Add walls...
        walls_matrix = state.data.layout.walls
        for i in range(walls_matrix.height):
            for j in range(walls_matrix.width):
                if walls_matrix[j][i]:
                    matrix[-1-i][j] = wall

        # Add food. BUG FIX: only write cells that actually contain food;
        # the original wrote `cell * food` unconditionally and so zeroed
        # out everything drawn before it.
        food_matrix = state.data.food
        for i in range(food_matrix.height):
            for j in range(food_matrix.width):
                if food_matrix[j][i]:
                    matrix[-1-i][j] = food

        # Add capsules. BUG FIX: the original reused state.data.food here,
        # tagging every food pellet as a capsule. Capsules are stored as a
        # list of (x, y) positions in the Berkeley framework — confirm.
        for pos in state.data.capsules:
            matrix[-1-int(pos[1])][int(pos[0])] = capsule

        # Add agents (ghost / scared ghost / pacman) last so they are
        # never hidden by food or capsules.
        for agentState in state.data.agentStates:
            pos = agentState.configuration.getPosition()
            if not agentState.isPacman:  # check for ghost
                if agentState.scaredTimer > 0:  # scared ghost..
                    matrix[-1-int(pos[1])][int(pos[0])] = scaredg
                else:  # regular ghost
                    matrix[-1-int(pos[1])][int(pos[0])] = ghost
            else:  # pacman..
                matrix[-1-int(pos[1])][int(pos[0])] = pac

        n = self.params['history']
        # Stack histories together; newest frame goes in channel n-1.
        observation = np.dstack([matrix]*n)
        # BUG FIX: the original compared a numpy array with `!= None`,
        # used off-by-one slices (0:n-2 / 1:n-1) and assigned an
        # undefined name `obs`.
        if self.last_state is not None and n > 1:
            observation[:, :, 0:n-1] = self.current_state[:, :, 1:n]
            observation[:, :, n-1] = matrix
        return observation

    # Start of each episode
    def registerInitialState(self, state):
        # Reset rewards
        self.last_score = 0
        self.current_score = 0
        self.last_reward = 0.

        # Reset states
        self.last_state = None
        self.current_state = self.getState(state)

        # Reset actions
        self.last_action = None

        # Reset values
        self.terminal = None
        self.won = True
        self.Q_global = []
        self.delay = 0

        # Next
        self.frame = 0
        self.numeps += 1

    # Perform legal actions, else STOP
    def getAction(self, state):
        move = self.getMove(state)

        # Stop moving when not legal
        legal = state.getLegalActions(0)
        if move not in legal:
            move = Directions.STOP

        return move
<file_sep>import tensorflow as tf
import numpy as np
import gym
from ddqn_lstm import DDQNet
from logger import Logger
from memory import replayMemory
from params import Params
class Agent():
    """Double-DQN agent with an LSTM head (DRQN) for gym Atari games.

    Collects whole episodes into an episode replay memory, trains on
    random temporal sub-sequences, and periodically syncs the target
    network with the primary network.
    """

    def __init__(self):
        self.env = gym.make(Params['GAME'])

        # setting up parameters
        self.batch_size = Params['BATCH_SIZE']
        self.buffer_size = Params['MEMORY_BUFFER_SIZE']
        self.random_action_prob = Params['RANDOM_ACTION_PROB_START']
        self.random_action_prob_end = Params['RANDOM_ACTION_PROB_END']
        self.frame_skip = Params['FRAME_SKIP']
        self.update_freq = Params['UPDATE_FREQ']
        self.sync_freq = Params['SYNC_FREQ']
        # linear annealing step for the exploration rate
        self.rand_prob_step = (self.random_action_prob - self.random_action_prob_end)/Params['ANNEALING_STEP']
        self.reward_discount = Params['REWARD_DISCOUNT']
        self.IMG_X = Params['IMG_X']
        self.IMG_Y = Params['IMG_Y']
        self.rnn_h_units = Params['RNN_H_UNIT']
        self.rnn_seq_len = Params['RNN_SEQUENCE_LENGTH']
        self.action_space = self.env.action_space.n
        self.updates = 0

        # setting up utilities
        self.memory_buffer = replayMemory(self.IMG_X, self.IMG_Y, self.buffer_size)
        tf.reset_default_graph()
        self.nn = DDQNet(self.action_space)

        # initialize variables
        self.sess = tf.Session()
        self.saver = tf.train.Saver()
        self.sess.run(tf.global_variables_initializer())

        # restore variables; if a checkpoint was restored, skip annealing
        self.logger = Logger(self.sess, self.saver)
        self.random_action_prob = self.random_action_prob_end if self.logger.restore() else self.random_action_prob

    def init_memory(self):
        """Fill the episode replay buffer before training starts."""
        buffer_counter = 0
        while True:
            # reset the game environment and the recurrent state
            observation = self.env.reset()
            rnn_state = (np.zeros([1, self.rnn_h_units]), np.zeros([1, self.rnn_h_units]))
            state = self.process_frame(observation)

            state_sequence = []
            action_sequence = []
            reward_sequence = []
            done_sequence = []
            while True:
                observation, action, reward, done, rnn_state = self.take_action(np.expand_dims(state, axis = 0), rnn_state, 1)
                # record the transition, then advance to the next frame
                state_sequence.append(state)
                action_sequence.append(action)
                reward_sequence.append(reward)
                done_sequence.append(done)

                state = self.process_frame(observation)

                if done:
                    self.memory_buffer.add(state_sequence, action_sequence, reward_sequence, done_sequence)
                    buffer_counter += 1
                    # BUG FIX: counter is already incremented; don't add 1.
                    print("Episode {} finished".format(buffer_counter))
                    break
            # BUG FIX: stop once `buffer_size` episodes are stored instead
            # of wrapping around and overwriting the first slot.
            if buffer_counter >= self.buffer_size:
                break

    def run(self):
        """Main training loop: play episodes, update every `update_freq` steps."""
        # initialize memory buffer
        self.init_memory()

        steps = 0
        while True:
            reward_sum = 0
            observation = self.env.reset()
            rnn_state = (np.zeros([1, self.rnn_h_units]), np.zeros([1, self.rnn_h_units]))

            state_sequence = []
            action_sequence = []
            reward_sequence = []
            done_sequence = []
            state = self.process_frame(observation)

            while True:
                # select an action based on the action-value function Q
                observation, action, reward, done, rnn_state = self.take_action(np.expand_dims(state, axis = 0), rnn_state, 1)
                reward_sum += reward

                # add the current transition to the episode record
                state_sequence.append(state)
                action_sequence.append(action)
                reward_sequence.append(reward)
                done_sequence.append(done)

                # update the new state
                state = self.process_frame(observation)
                steps += 1

                # update the network after a few steps
                if steps % self.update_freq == 0:
                    # Anneal the exploration rate down to its configured
                    # floor (BUG FIX: was hard-coded 0.1 instead of the
                    # RANDOM_ACTION_PROB_END parameter).
                    if self.random_action_prob > self.random_action_prob_end:
                        self.random_action_prob -= self.rand_prob_step
                    self.update_nn()

                if done:
                    self.memory_buffer.add(state_sequence, action_sequence, reward_sequence, done_sequence)
                    self.logger.log(reward_sum)
                    break

    def take_action(self, current_state, rnn_state_in, batch_size):
        """e-greedy action selection; repeats the action `frame_skip` times.

        Returns (observation, action, summed reward, done, rnn_state).
        """
        if np.random.random_sample() > self.random_action_prob:
            # use model to predict action
            action, rnn_state = self.nn.predict_act(self.sess, current_state, rnn_state_in, batch_size)
        else:
            # random action; still advance the recurrent state
            action = np.random.randint(self.action_space)
            rnn_state = self.nn.return_rnn_state(self.sess, current_state, rnn_state_in, batch_size)

        # excute the action for a few steps
        reward = 0
        for _ in range(self.frame_skip):
            observation, reward_temp, done, info = self.env.step(action)
            reward += reward_temp
            if done:
                break
        return (observation, action, reward, done, rnn_state)

    def update_nn(self):
        """Train on one random minibatch of temporal sequences."""
        # randomly sample minibatch from memory
        state_current, state_future, actions, current_rewards, end_game = self.memory_buffer.makeBatch(self.batch_size, self.rnn_seq_len)
        rnn_state_init = (np.zeros([self.batch_size, self.rnn_h_units]), np.zeros([self.batch_size, self.rnn_h_units]))
        self.nn.train(self.sess, state_current, state_future, actions, current_rewards, end_game, rnn_state_init, self.batch_size, self.rnn_seq_len)

        # every C steps reset Q' = Q (sync target with primary network)
        self.updates += 1
        if self.updates % self.sync_freq == 0:
            self.nn.sync_variables(self.sess)

    def test(self):
        """Play forever with rendering, using the current policy.

        BUG FIX: the original called take_action(state) with one argument,
        which does not match its (state, rnn_state, batch_size) signature,
        and fed frames of mismatched shape.
        """
        while True:
            observation = self.env.reset()
            rnn_state = (np.zeros([1, self.rnn_h_units]), np.zeros([1, self.rnn_h_units]))
            state = self.process_frame(observation)
            while True:
                self.env.render()
                # select an action based on the action-value function Q
                observation, action, reward, done, rnn_state = self.take_action(np.expand_dims(state, axis = 0), rnn_state, 1)
                # update the new state
                state = self.process_frame(observation)
                if done:
                    break

    def process_frame(self, frame):
        """Grayscale + 2x2 downsample, scaled to [-1, 1); keeps a trailing
        channel axis so the output shape is (H/2, W/2, 1)."""
        return np.mean(frame[::2,::2], axis = 2, dtype = 'float32', keepdims = True) / 128 - 1

    def reset_game(self):
        # Placeholder; episode resets are handled inline in run()/test().
        pass
<file_sep>#!/bin/bash -l
# SGE batch-job directives (lines beginning with #$) follow.
# Specify the project name
#$-P dlearn
# Specify the time limit
#$-l h_rt=48:00:00
# Job Name
#$-N capsuleDQN_h8_x2
# Send email at the end of the job
#$-m ae
# Join error and output streams
#$-j y
# Specify the number of cores
#$-pe omp 2
# gpu requirement (fraction of a GPU)
#$-l gpus=0.5
# minimum gpu compute capability
#$-l gpu_c=3.5

# Load modules required by this TensorFlow build:
module load cuda/8.0
module load cudnn/5.1
module load python/3.6.0
module load tensorflow/r1.0_python-3.6.0

# Run the program: train the DQN Pac-man agent on the capsuleClassic
# layout for 100000 episodes (the first 90000 are training) with an
# 8-frame history, in quiet mode (-q).
#Run a model on smallGrid layout for 6000 episodes, of which 5000 episodes are used for training
#python3 pacman.py -p PacmanDQN -n 6000 -x 5000 -l smallGrid
python3 pacman.py -p PacmanDQN -n 100000 -x 90000 -l capsuleClassic -q --numHistory 8
#python3 pacman.py -p SimpleQman -n 100000 -x 90000 -l capsuleClassic -q --numHistory 1
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 12 21:19:09 2017
@author: shengx
"""
import tensorflow as tf
import numpy as np
# Hyper-parameters shared by the A3C launcher, its workers and the network.
Params = {
    'GAME': 'Pong-v0',            # gym environment id
    'LEARNING_RATE': 0.00025,     # RMSProp learning rate
    'REWARD_DISCOUNT': 0.99,      # discount factor gamma
    'FRAME_SKIP': 2,              # times each chosen action is repeated
    'SYNC_FREQ': 2000,            # sync interval — confirm units against trainer
    'UPDATE_FREQ': 4,             # steps of experience per gradient update
    'SAVE_FREQ': 2000,            # checkpoint interval — confirm units (episodes/steps)
    'SAVE_PATH': './log/',        # checkpoint/log directory
    'IMG_X': 105,                 # processed frame height (210 / 2)
    'IMG_Y': 80,                  # processed frame width (160 / 2)
    'IMG_Z': 1,                   # frames stacked per network input
    'ENTROPY_PENALTY': 0,         # entropy regularization weight — confirm in A3CNet
    'MIN_POLICY': 0.02}           # floor on action probabilities — confirm in A3CNet
<file_sep># Modified version of: https://github.com/mrkulk/deepQN_tensorflow
import numpy as np
import tensorflow as tf
class DQN:
    """Convolutional Q-network used by PacmanDQN (TF1 static graph).

    Three conv layers and two fully connected layers map the
    (height, width, history) observation to 4 action values, trained by
    minimizing the squared TD error with RMSProp.
    """

    def __init__(self, params):
        self.params = params
        self.network_name = 'qnet'
        self.sess = tf.Session()
        # Placeholders: observation, one-hot actions, rewards, terminal
        # flags, and the bootstrap target q_t = max_a' Q(s', a').
        self.x = tf.placeholder('float', [None, params['height'],params['width'], params['history']],name=self.network_name + '_x')
        self.actions = tf.placeholder("float", [None, 4], name=self.network_name + '_actions')
        self.rewards = tf.placeholder("float", [None], name=self.network_name + '_rewards')
        self.terminals = tf.placeholder("float", [None], name=self.network_name + '_terminals')
        self.q_t = tf.placeholder('float', [None], name=self.network_name + '_q_t')

        # Layer 1 (Convolutional)
        layer_name = 'conv1' ; size = 8 ; channels = params['history'] ; filters = 16 ; stride = 2
        self.w1 = tf.Variable(tf.random_normal([size,size,channels,filters], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b1 = tf.Variable(tf.constant(0.1, shape=[filters]),name=self.network_name + '_'+layer_name+'_biases')
        self.c1 = tf.nn.conv2d(self.x, self.w1, strides=[1, stride, stride, 1], padding='SAME',name=self.network_name + '_'+layer_name+'_convs')
        self.o1 = tf.nn.relu(tf.add(self.c1,self.b1),name=self.network_name + '_'+layer_name+'_activations')

        # Layer 2 (Convolutional)
        layer_name = 'conv2' ; size = 4 ; channels = 16 ; filters = 32 ; stride = 1
        self.w2 = tf.Variable(tf.random_normal([size,size,channels,filters], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b2 = tf.Variable(tf.constant(0.1, shape=[filters]),name=self.network_name + '_'+layer_name+'_biases')
        self.c2 = tf.nn.conv2d(self.o1, self.w2, strides=[1, stride, stride, 1], padding='SAME',name=self.network_name + '_'+layer_name+'_convs')
        self.o2 = tf.nn.relu(tf.add(self.c2,self.b2),name=self.network_name + '_'+layer_name+'_activations')

        # Layer 3 (Convolutional)
        layer_name = 'conv3' ; size = 3 ; channels = 32 ; filters = 32 ; stride = 1
        self.w3 = tf.Variable(tf.random_normal([size,size,channels,filters], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b3 = tf.Variable(tf.constant(0.1, shape=[filters]),name=self.network_name + '_'+layer_name+'_biases')
        self.c3 = tf.nn.conv2d(self.o2, self.w3, strides=[1, stride, stride, 1], padding='SAME',name=self.network_name + '_'+layer_name+'_convs')
        self.o3 = tf.nn.relu(tf.add(self.c3,self.b3),name=self.network_name + '_'+layer_name+'_activations')

        # Flatten the conv output for the dense layers.
        o3_shape = self.o3.get_shape().as_list()

        # Layer 4 (Fully connected)
        layer_name = 'fc4' ; hiddens = 256 ; dim = o3_shape[1]*o3_shape[2]*o3_shape[3]
        self.o3_flat = tf.reshape(self.o3, [-1,dim],name=self.network_name + '_'+layer_name+'_input_flat')
        self.w4 = tf.Variable(tf.random_normal([dim,hiddens], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b4 = tf.Variable(tf.constant(0.1, shape=[hiddens]),name=self.network_name + '_'+layer_name+'_biases')
        self.ip4 = tf.add(tf.matmul(self.o3_flat,self.w4),self.b4,name=self.network_name + '_'+layer_name+'_ips')
        self.o4 = tf.nn.relu(self.ip4,name=self.network_name + '_'+layer_name+'_activations')

        # Layer 5 (Fully connected, linear output: one Q value per action)
        layer_name = 'fc5' ; hiddens = 4 ; dim = 256
        self.w5 = tf.Variable(tf.random_normal([dim,hiddens], stddev=0.01),name=self.network_name + '_'+layer_name+'_weights')
        self.b5 = tf.Variable(tf.constant(0.1, shape=[hiddens]),name=self.network_name + '_'+layer_name+'_biases')
        self.y = tf.add(tf.matmul(self.o4,self.w5),self.b5,name=self.network_name + '_'+layer_name+'_outputs')

        # Q, cost and optimizer: yj = r + gamma * (1 - terminal) * q_t,
        # cost = sum (yj - Q(s, a))^2 over the batch.
        self.discount = tf.constant(self.params['discount'])
        self.yj = tf.add(self.rewards, tf.multiply(1.0-self.terminals, tf.multiply(self.discount, self.q_t)))
        self.Q_pred = tf.reduce_sum(tf.multiply(self.y,self.actions), reduction_indices=1)
        self.cost = tf.reduce_sum(tf.pow(tf.subtract(self.yj, self.Q_pred), 2))

        self.global_step = tf.Variable(0, name='global_step', trainable=False)
        self.rmsprop = tf.train.RMSPropOptimizer(self.params['lr'],self.params['rms_decay'],0.0,self.params['rms_eps']).minimize(self.cost,global_step=self.global_step)

        self.saver = tf.train.Saver(max_to_keep=0)

        self.sess.run(tf.global_variables_initializer())

        if self.params['load_file'] is not None:
            print('Loading checkpoint...')
            self.saver.restore(self.sess,self.params['load_file'])

    def train(self,states,rewards,actions,nstates,terminals):
        """One training step; returns (global_step, cost).

        First computes the bootstrap target max_a' Q(s', a') from the next
        states, then minimizes the squared TD error on the current states.
        """
        feed_dict={self.x: nstates, self.q_t: np.zeros(nstates.shape[0]), self.actions: actions, self.terminals:terminals, self.rewards: rewards}
        q_t = self.sess.run(self.y,feed_dict=feed_dict) #q-val based on next state
        q_t = np.amax(q_t,axis=1)
        feed_dict={self.x: states, self.q_t: q_t, self.actions: actions, self.terminals:terminals, self.rewards: rewards}
        _,cnt,cost = self.sess.run([self.rmsprop,self.global_step,self.cost],feed_dict=feed_dict)
        return cnt, cost

    def save(self,filename):
        """Write a checkpoint of all network variables to `filename`."""
        self.saver.save(self.sess, filename)
|
f296970c47ec0234e43a8a7417b019bb938c6b5c
|
[
"Markdown",
"Python",
"Shell"
] | 24 |
Python
|
grumpySloth357/DeepLearningProject
|
cad6162ef1c072e7ddf2286597cde64739af581f
|
2b2dc4cadfbb1ea2f1cef4cccbf1126c67d93ae6
|
refs/heads/master
|
<file_sep>console.log('toot')<file_sep># Skald
#### A pen and paper game assistant
This is a UI demo to begin testing Kunsido integration
|
e0603b03eba24e407414d5df4aa561b16a0fafad
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
Sleepy-Fish/skald-ui
|
617bf0bd06e527b36cd5bc1edeb8b021efa2106c
|
0a252b0827bb1cd2ed3bb42bfe04c0bf3c97b855
|
refs/heads/master
|
<repo_name>jebreimo/Xyz<file_sep>/tests/XyzTest/test_Projections.cpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 2016-01-02.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Xyz.hpp"
#include "YtestUtilities.hpp"
namespace
{
    void test_perspective_projection()
    {
        // A symmetric frustum with the near plane at z = -9.
        const auto frustum = Xyz::make_frustum_matrix<double>(-1, 1, -1, 1, 9, 11);
        const auto corner = Xyz::make_vector4<double>(-1.0, -1.0, -9.0, 1.0);
        const auto projected = frustum * corner;
        Y_EQUAL(projected, Xyz::make_vector4<double>(-9, -9, -9, 9));
    }

    void test_look_at()
    {
        // Camera at (5, 2, 3) looking towards (1, 8, 3) with z as up.
        const auto view = Xyz::make_look_at_matrix(
            Xyz::make_vector3<double>(5, 2, 3),
            Xyz::make_vector3<double>(1, 8, 3),
            Xyz::make_vector3<double>(0, 0, 1));
        const auto result = view * Xyz::make_vector4(1.5, 4.0, 3.0, 1.0);
        const auto expected = Xyz::make_vector4(
            -std::sqrt(1 + 1.5 * 1.5),
            0.0,
            -std::sqrt(2 * 2 + 3 * 3),
            1.0);
        Y_EQUIVALENT(result, expected, 1e-10);
    }

    Y_SUBTEST("Fundamentals",
              test_perspective_projection,
              test_look_at);
}
<file_sep>/tests/CatchXyzTest/test_ComplexApprox.cpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-29.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Xyz/ComplexApprox.hpp>
#include <catch2/catch_test_macros.hpp>
TEST_CASE("ComplexApprox comparisons")
{
    // Margin of 0.01 on both the real and the imaginary component.
    const Xyz::ComplexApprox<double> approx({1, 1}, {0.01, 0.01});

    // Values just inside the margin compare equal...
    REQUIRE(approx == std::complex<double>(0.991, 1));
    REQUIRE(approx == std::complex<double>(1.009, 1));
    REQUIRE(approx == std::complex<double>(1, 0.991));
    REQUIRE(approx == std::complex<double>(1, 1.009));
    // ...while values just outside it do not.
    REQUIRE_FALSE(approx == std::complex<double>(0.989, 1));
    REQUIRE_FALSE(approx == std::complex<double>(1.011, 1));
    REQUIRE_FALSE(approx == std::complex<double>(1, 0.989));
    REQUIRE_FALSE(approx == std::complex<double>(1, 1.011));
}
<file_sep>/tests/CatchXyzTest/test_Rect.cpp
//****************************************************************************
// Copyright © 2023 <NAME>. All rights reserved.
// Created by <NAME> on 2023-08-19.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Xyz/Rectangle.hpp>
#include <catch2/catch_test_macros.hpp>
TEST_CASE("Test Rectangle")
{
    using Xyz::Vector2I;
    // Rectangle anchored at (10, 20) with size (100, 80).
    Xyz::RectangleI rect({10, 20}, {100, 80});
    REQUIRE(Xyz::get_min(rect) == Vector2I(10, 20));
    REQUIRE(Xyz::get_max(rect) == Vector2I(110, 100));
    REQUIRE(Xyz::get_bottom_left(rect) == Vector2I(10, 20));
    REQUIRE(Xyz::get_bottom_right(rect) == Vector2I(110, 20));
    REQUIRE(Xyz::get_top_left(rect) == Vector2I(10, 100));
    REQUIRE(Xyz::get_top_right(rect) == Vector2I(110, 100));
    REQUIRE(Xyz::get_center(rect) == Vector2I(60, 60));
    // Moving the center translates the origin accordingly.
    Xyz::set_center(rect, Vector2I(45, 75));
    REQUIRE(rect.origin == Vector2I(-5, 35));
}
TEST_CASE("Test Rectangle min and max")
{
    using R = Xyz::Rectangle<int>;
    using V = Xyz::Vector2I;
    // get_min/get_max must handle negative extents in either direction.
    REQUIRE(Xyz::get_min(R{{10, 15}, {10, 10}}) == V(10, 15));
    REQUIRE(Xyz::get_max(R{{10, 15}, {10, 10}}) == V(20, 25));
    REQUIRE(Xyz::get_min(R{{10, 15}, {-10, 10}}) == V(0, 15));
    REQUIRE(Xyz::get_max(R{{10, 15}, {-10, 10}}) == V(10, 25));
    REQUIRE(Xyz::get_min(R{{10, 15}, {10, -10}}) == V(10, 5));
    REQUIRE(Xyz::get_max(R{{10, 15}, {10, -10}}) == V(20, 15));
    REQUIRE(Xyz::get_min(R{{10, 15}, {-10, -10}}) == V(0, 5));
    REQUIRE(Xyz::get_max(R{{10, 15}, {-10, -10}}) == V(10, 15));
}
TEST_CASE("Test Rectangle is_empty")
{
    using R = Xyz::RectangleF;
    // A rectangle is empty when either extent is zero...
    REQUIRE(Xyz::is_empty(R({2, 3}, {0, 0})));
    REQUIRE(Xyz::is_empty(R({2, 3}, {0, -1})));
    REQUIRE(Xyz::is_empty(R({2, 3}, {-1, 0})));
    // ...but not when both extents are non-zero, even if negative.
    REQUIRE_FALSE(Xyz::is_empty(R({2, 3}, {-1, -1})));
}
TEST_CASE("Test Rectangle normalize")
{
    using R = Xyz::RectangleD;
    // normalize() moves the origin so that both extents are positive
    // while the rectangle still covers the same points.
    const std::pair<R, R> cases[] = {
        {R({10, 15}, {10, 10}),   R({10, 15}, {10, 10})},
        {R({10, 15}, {-10, 10}),  R({0, 15}, {10, 10})},
        {R({10, 15}, {10, -10}),  R({10, 5}, {10, 10})},
        {R({10, 15}, {-10, -10}), R({0, 5}, {10, 10})},
    };
    for (const auto& [input, expected] : cases)
        REQUIRE(Xyz::normalize(input) == expected);
}
<file_sep>/include/Xyz/Triangle.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 26.02.2016.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Approx.hpp"
#include "Vector.hpp"
namespace Xyz
{
/**
 * @brief A triangle in N-dimensional space, defined by its three
 *  corner points.
 */
template <typename T, unsigned N>
class Triangle
{
public:
    constexpr Triangle() = default;
    constexpr Triangle(const Vector<T, N>& a,
                       const Vector<T, N>& b,
                       const Vector<T, N>& c)
        : points{a, b, c}
    {}
    /// Returns corner @a i (0, 1 or 2). No bounds checking is done.
    constexpr const Vector<T, N>& operator[](unsigned i) const
    {
        return points[i];
    }
    constexpr Vector<T, N>& operator[](unsigned i)
    {
        return points[i];
    }
    Vector<T, N> points[3];
};
/**
 * @brief Convenience factory that deduces the triangle's value type
 *  and dimension from its three corner points.
 */
template <typename T, unsigned N>
[[nodiscard]]
constexpr Triangle<T, N> make_triangle(const Vector<T, N>& a,
                                       const Vector<T, N>& b,
                                       const Vector<T, N>& c)
{
    return {a, b, c};
}
/**
 * @brief Returns the square of the area of @a triangle.
 *
 * Uses Heron's formula rewritten in terms of the squared side lengths:
 * 16*A^2 = 4*a^2*b^2 - (a^2 + b^2 - c^2)^2, which avoids taking the
 * square roots of the side lengths.
 *
 * NOTE(review): for integer T the final division by 16 truncates;
 * presumably callers use floating-point types - confirm.
 */
template <typename T, unsigned N>
[[nodiscard]]
auto get_area_squared(const Triangle<T, N>& triangle)
{
    // Heron's formula with minimal use of square roots.
    auto a2 = get_length_squared(triangle[1] - triangle[0]);
    auto b2 = get_length_squared(triangle[2] - triangle[1]);
    auto c2 = get_length_squared(triangle[0] - triangle[2]);
    auto d = a2 + b2 - c2;
    return (4 * a2 * b2 - d * d) / 16;
}
/// @brief Returns the area of @a triangle.
template <typename T, unsigned N>
[[nodiscard]]
typename FloatType<T>::type get_area(const Triangle<T, N>& triangle)
{
    return std::sqrt(get_area_squared(triangle));
}
/**
 * @brief Returns true if @a point lies strictly inside @a triangle.
 *
 * Checks the sign of the point's projection onto the normal of each of
 * the three edges; all three must be positive (beyond @a margin).
 * NOTE(review): only one winding order of the corners - the one where
 * the edge normals point towards the interior - is accepted; confirm
 * that callers guarantee this.
 */
template <typename T, typename U,
          typename Float = typename FloatType<decltype(T() + U())>::type>
[[nodiscard]]
bool contains_point(const Triangle<T, 2>& triangle,
                    const Vector<U, 2>& point,
                    Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto a = dot(get_normal(triangle[1] - triangle[0]),
                 (point - triangle[0]));
    if (Xyz::Approx<Float>(a, margin) <= 0)
        return false;
    auto b = dot(get_normal(triangle[2] - triangle[1]),
                 (point - triangle[1]));
    if (Xyz::Approx<Float>(b, margin) <= 0)
        return false;
    auto c = dot(get_normal(triangle[0] - triangle[2]),
                 (point - triangle[2]));
    return Xyz::Approx<Float>(c, margin) > 0;
}
/**
 * @brief Returns true if @a point lies inside @a triangle or on its
 *  boundary.
 *
 * Same test as contains_point, but points within @a margin of an edge
 * are accepted.
 */
template <typename T, typename U,
          typename Float = typename FloatType<decltype(T() + U())>::type>
[[nodiscard]]
bool contains_point_inclusive(const Triangle<T, 2>& triangle,
                              const Vector<U, 2>& point,
                              Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto a = dot(get_normal(triangle[1] - triangle[0]),
                 (point - triangle[0]));
    if (Xyz::Approx<Float>(a, margin) < 0)
        return false;
    auto b = dot(get_normal(triangle[2] - triangle[1]),
                 (point - triangle[1]));
    if (Xyz::Approx<Float>(b, margin) < 0)
        return false;
    auto c = dot(get_normal(triangle[0] - triangle[2]),
                 (point - triangle[2]));
    return Xyz::Approx<Float>(c, margin) >= 0;
}
}
<file_sep>/include/Xyz/LineLineIntersection.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 09.02.2016
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <tuple>
#include "Approx.hpp"
#include "Line.hpp"
#include "LineSegment.hpp"
namespace Xyz
{
/**
 * @brief Describes how two lines or two line segments relate to each
 *  other geometrically.
 */
enum class LineRelationship
{
    /* Lines or line segments are parallel, but not co-linear.
     */
    NONINTERSECTING,
    /* Lines or line segments are intersecting.
     */
    INTERSECTING,
    /* Lines are overlapping.
     */
    OVERLAPPING,
    /* Line segments are co-linear and may or may not overlap.
     */
    COLINEAR
};
// Expands to a case label that writes the enumerator's name to the
// stream. NOTE(review): the macro is never #undef-ed and therefore
// leaks into every file that includes this header - confirm whether
// other headers rely on it before adding an #undef.
#define XYZ_CASE_OSTREAM_ENUM(name) \
    case name: return os << #name
/// Writes the name of @a e to @a os.
inline std::ostream& operator<<(std::ostream& os, LineRelationship e)
{
    switch (e)
    {
    XYZ_CASE_OSTREAM_ENUM(LineRelationship::NONINTERSECTING);
    XYZ_CASE_OSTREAM_ENUM(LineRelationship::INTERSECTING);
    XYZ_CASE_OSTREAM_ENUM(LineRelationship::OVERLAPPING);
    XYZ_CASE_OSTREAM_ENUM(LineRelationship::COLINEAR);
    default: return os << "Unknown value.";
    }
}
/**
 * @brief Computes where two infinite 2D lines intersect.
 *
 * @return A tuple of the relationship between the lines and, when they
 *     are INTERSECTING, the position of the intersection point
 *     relative to each line: the point equals
 *     get_point(a) + t0 * get_vector(a) and
 *     get_point(b) + t1 * get_vector(b). For OVERLAPPING and
 *     NONINTERSECTING the positions are default-constructed (zero).
 */
template <typename T, typename Float = typename FloatType<T>::type>
std::tuple<LineRelationship, Float, Float>
get_intersection_positions(const Line<T, 2>& a,
                           const Line<T, 2>& b,
                           Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto v_a = get_vector(a);
    auto n_b = get_normal(get_vector(b));
    // Zero when a's direction is perpendicular to b's normal, i.e.
    // when the lines are parallel.
    Float denominator = dot(v_a, n_b);
    if (Approx<Float>(denominator, margin) == 0.0)
    {
        // Parallel lines are co-linear if a's point lies on b.
        auto distance = dot(n_b, (get_point(a) - get_point(b)));
        if (Approx<Float>(distance, margin) == 0)
            return {LineRelationship::OVERLAPPING, Float(), Float()};
        else
            return {LineRelationship::NONINTERSECTING, Float(), Float()};
    }
    auto n_a = get_normal(get_vector(a));
    auto v_ab = get_point(b) - get_point(a);
    return {LineRelationship::INTERSECTING,
            dot(v_ab, n_b) / denominator,
            dot(v_ab, n_a) / denominator};
}
/**
 * @brief Computes where two 2D line segments intersect.
 *
 * Positions t0 and t1 are relative to segments a and b respectively;
 * the intersection only counts when both lie strictly inside (0, 1).
 * NOTE(review): the strict comparisons mean that contact exactly at an
 * end point is reported as NONINTERSECTING - presumably intentional;
 * confirm.
 */
template <typename T, typename Float = typename FloatType<T>::type>
std::tuple<LineRelationship, Float, Float>
get_intersection_positions(const LineSegment<T, 2>& a,
                           const LineSegment<T, 2>& b,
                           Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto [rel, t0, t1] = get_intersection_positions(make_line(a),
                                                    make_line(b),
                                                    margin);
    if (rel == LineRelationship::OVERLAPPING)
    {
        // For segments, co-linear lines may still fail to overlap;
        // callers use get_intersection_extents to find out.
        return {LineRelationship::COLINEAR, t0, t1};
    }
    else if (rel == LineRelationship::INTERSECTING
             && 0.0 < Approx<Float>(t0, margin)
             && Approx<Float>(t0, margin) < 1.0
             && 0.0 < Approx<Float>(t1, margin)
             && Approx<Float>(t1, margin) < 1.0)
    {
        return {rel, t0, t1};
    }
    else
    {
        return {LineRelationship::NONINTERSECTING, t0, t1};
    }
}
/**
 * @brief Projects segment @a b onto the line of segment @a a.
 *
 * @return A flag telling whether the projection overlaps a's own
 *     extent [0, 1], and the projected end point positions of b,
 *     clamped to [0, 1] when they overlap.
 * NOTE(review): the @a margin parameter is currently unused - confirm
 * whether the comparisons below were meant to use it.
 */
template <typename T, typename Float = typename FloatType<T>::type>
std::pair<bool, std::pair<Float, Float>>
get_projection_extent(const LineSegment<T, 2>& a,
                      const LineSegment<T, 2>& b,
                      Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto length = Float(get_length_squared(get_vector(a)));
    auto ta0 = dot(get_vector(a), (get_start(b) - get_start(a))) / length;
    auto ta1 = dot(get_vector(a), (get_end(b) - get_start(a))) / length;
    // Entirely before the start or after the end of a: no overlap.
    if ((ta0 > 1 && ta1 > 1) || (ta0 < 0 && ta1 < 0))
        return {false, {ta0, ta1}};
    ta0 = clamp<Float>(ta0, 0.0, 1.0);
    ta1 = clamp<Float>(ta1, 0.0, 1.0);
    return {true, {ta0, ta1}};
}
/**
 * @brief Projects each of the two segments onto the other.
 *
 * @return whether the segments' extents overlap, plus b's extent on a
 *     and a's extent on b.
 */
template <typename T, typename Float = typename FloatType<T>::type>
std::tuple<bool, std::pair<Float, Float>, std::pair<Float, Float>>
get_projection_extents(const LineSegment<T, 2>& a,
                       const LineSegment<T, 2>& b,
                       Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    auto [overlaps_a, offsets_a] = get_projection_extent(a, b, margin);
    auto [overlaps_b, offsets_b] = get_projection_extent(b, a, margin);
    // NOTE(review): when b projects onto a in reverse order, the
    // offsets of the *other* projection are swapped so the two ranges
    // run in corresponding directions - confirm this asymmetry is
    // intentional.
    if (offsets_a.first > offsets_a.second)
        std::swap(offsets_b.first, offsets_b.second);
    return {overlaps_a && overlaps_b, offsets_a, offsets_b};
}
/**
 * @brief Computes the overlap between two 2D line segments.
 *
 * For segments that cross in a single point both extents collapse to
 * that point's position; for co-linear segments the extents are the
 * (possibly empty) overlapping ranges.
 */
template <typename T, typename Float = typename FloatType<T>::type>
std::tuple<LineRelationship,
           std::pair<Float, Float>,
           std::pair<Float, Float>>
get_intersection_extents(const LineSegment<T, 2>& a,
                         const LineSegment<T, 2>& b,
                         Float margin = Constants<Float>::DEFAULT_MARGIN)
{
    using std::get;
    auto isect = get_intersection_positions(a, b, margin);
    if (get<0>(isect) == LineRelationship::COLINEAR)
    {
        auto overlap = get_projection_extents(a, b, margin);
        return {get<0>(overlap) ? LineRelationship::INTERSECTING
                                : LineRelationship::NONINTERSECTING,
                get<1>(overlap), get<2>(overlap)};
    }
    else
    {
        return {get<0>(isect),
                {get<1>(isect), get<1>(isect)},
                {get<2>(isect), get<2>(isect)}};
    }
}
}
<file_sep>/tests/CatchXyzTest/test_Vector.cpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-25.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Xyz/SphericalPoint.hpp>
#include <catch2/catch_test_macros.hpp>
#include "catch2/matchers/catch_matchers_floating_point.hpp"
constexpr auto PI = Xyz::Constants<double>::PI;
constexpr auto MARGIN = Xyz::Constants<double>::DEFAULT_MARGIN;
using Catch::Matchers::WithinAbs;
// Converts @a cartesian to spherical coordinates, checks the result
// against @a spherical, then converts back and checks that the round
// trip reproduces the original point (all within DEFAULT_MARGIN).
void test_from_xyz_to_spherical_and_back(Xyz::Vector3D cartesian,
                                         Xyz::SphericalPoint<double> spherical)
{
    auto sp = Xyz::to_spherical<double>(cartesian);
    REQUIRE_THAT(sp.radius, WithinAbs(spherical.radius, MARGIN));
    REQUIRE_THAT(sp.azimuth, WithinAbs(spherical.azimuth, MARGIN));
    REQUIRE_THAT(sp.polar, WithinAbs(spherical.polar, MARGIN));
    auto p = Xyz::to_cartesian(sp);
    REQUIRE_THAT(p[0], WithinAbs(cartesian[0], MARGIN));
    REQUIRE_THAT(p[1], WithinAbs(cartesian[1], MARGIN));
    REQUIRE_THAT(p[2], WithinAbs(cartesian[2], MARGIN));
}
// Round-trip conversion tests. The azimuths step through all four
// quadrants and the polar angles cover both signs.
// The section list previously contained an exact duplicate of the
// "180°, -45°" section (same name, same values); the copy has been
// removed.
TEST_CASE("xyz to spherical")
{
    SECTION("0°, -45°")
    {
        test_from_xyz_to_spherical_and_back(
            {1, 0, -1},
            {sqrt(2), 0, -PI / 4});
    }
    SECTION("45°, 35.3°")
    {
        test_from_xyz_to_spherical_and_back(
            {1, 1, 1},
            {sqrt(3), PI / 4, asin(1 / sqrt(3))});
    }
    SECTION("90°, -45°")
    {
        test_from_xyz_to_spherical_and_back(
            {0, 1, -1},
            {sqrt(2), PI / 2, -PI / 4});
    }
    SECTION("135°, 35.3°")
    {
        test_from_xyz_to_spherical_and_back(
            {-1, 1, 1},
            {sqrt(3), 3 * PI / 4, asin(1 / sqrt(3))});
    }
    SECTION("180°, -45°")
    {
        test_from_xyz_to_spherical_and_back(
            {-1, 0, -1},
            {sqrt(2), PI, -PI / 4});
    }
    SECTION("-135°, 35.3°")
    {
        test_from_xyz_to_spherical_and_back(
            {-1, -1, 1},
            {sqrt(3), -3 * PI / 4, asin(1 / sqrt(3))});
    }
    SECTION("-45°, 35.3°")
    {
        test_from_xyz_to_spherical_and_back(
            {1, -1, 1},
            {sqrt(3), -1 * PI / 4, asin(1 / sqrt(3))});
    }
    SECTION("-90°, -45°")
    {
        test_from_xyz_to_spherical_and_back(
            {0, -1, -1},
            {sqrt(2), -PI / 2, -PI / 4});
    }
}
// get_clamped must return a clamped copy and leave the input
// untouched; clamp_inplace must modify the vector in place.
TEST_CASE("clamp vector")
{
    Xyz::Vector4D v1(1, -2, 3, -4);
    auto v2 = get_clamped(v1, -1.0, 1.0);
    REQUIRE(v2 == Xyz::Vector4D(1, -1, 1, -1));
    REQUIRE(v1 == Xyz::Vector4D(1, -2, 3, -4));
    clamp_inplace(v1, -1.0, 1.0);
    REQUIRE(v1 == Xyz::Vector4D(1, -1, 1, -1));
}
// Scaling to length 1.0 must normalize the vector, i.e. divide every
// component by the vector's Euclidean length.
TEST_CASE("scale vector")
{
    Xyz::Vector4D v1(1, -2, 3, -4);
    auto v2 = get_scaled(v1, 1.0);
    const auto root = std::sqrt(1 + 2*2 + 3*3 + 4*4);
    REQUIRE(are_equivalent(v2, Xyz::Vector4D(1 / root, -2 / root, 3 / root, -4 / root)));
    scale_inplace(v1, 1.0);
    REQUIRE(are_equivalent(v1, Xyz::Vector4D(1 / root, -2 / root, 3 / root, -4 / root)));
}
<file_sep>/src/Xyz/RandomGenerator.cpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-04-06.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/RandomGenerator.hpp"
namespace Xyz
{
    /**
     * @brief Returns the process-wide default random engine.
     *
     * The engine is created lazily on the first call and seeded once
     * with a value drawn from std::random_device.
     */
    std::default_random_engine& get_random_engine()
    {
        static std::default_random_engine engine{std::random_device{}()};
        return engine;
    }
}
<file_sep>/tests/XyzTest/test_LineClipping.cpp
//****************************************************************************
// Copyright © 2017 <NAME>. All rights reserved.
// Created by <NAME> on 24.04.2017.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Xyz.hpp"
#include "Ytest/Ytest.hpp"
namespace
{
// Clips the segment @a start -> @a end against @a rectangle and checks
// the returned relative positions (both are expected to be -1 when the
// segment lies entirely outside).
void test_clip(const Xyz::Rectangle<double>& rectangle,
               const Xyz::Vector2D& start,
               const Xyz::Vector2D& end,
               double expected_t0, double expected_t1)
{
    Xyz::LineSegment<double, 2> line_segment(start, end);
    auto ts = Xyz::get_clipping_positions(rectangle, line_segment);
    Y_EQUIVALENT(ts.first, expected_t0, 1e-12);
    Y_EQUIVALENT(ts.second, expected_t1, 1e-12);
}
void test_clipping()
{
    Xyz::Rectangle<double> rect({-10, -10}, {20, 20});
    // Entirely inside.
    Y_CALL(test_clip(rect, {-5, -5}, {5, 5}, 0, 1));
    // Passes just outside a corner.
    Y_CALL(test_clip(rect, {-22, 0}, {0, 22}, -1, -1));
    // Crosses two edges; the result must not depend on direction.
    Y_CALL(test_clip(rect, {-20, -10}, {10, 20}, 1 / 3.0, 2 / 3.0));
    Y_CALL(test_clip(rect, {10, 20}, {-20, -10}, 1 / 3.0, 2 / 3.0));
    // Starts inside, leaves through the top edge (and reversed).
    Y_CALL(test_clip(rect, {0, -8}, {0, 16}, 0, 0.75));
    Y_CALL(test_clip(rect, {0, 16}, {0, -8}, 0.25, 1.0));
}
Y_SUBTEST("Geometry", test_clipping);
}
<file_sep>/include/Xyz/MatrixDeterminant.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-08.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <array>
#include <numeric>
#include "Matrix.hpp"
namespace Xyz
{
namespace Details
{
/**
 * @brief Computes the determinant of the 3x3 submatrix of @a m formed
 *  by its three bottom rows and the three columns in
 *  @a column_indices.
 *
 * This is the base case of the recursive cofactor expansion in the
 * overload below.
 */
template <typename T, unsigned N>
T get_submatrix_determinant(
    const Matrix<T, N, N>& m,
    const std::array<unsigned, 3>& column_indices)
{
    // A 3-column submatrix always occupies the three bottom rows.
    constexpr auto r = N - 3;
    auto a = m[{r, column_indices[0]}];
    auto b = m[{r, column_indices[1]}];
    auto c = m[{r, column_indices[2]}];
    auto d = m[{r + 1, column_indices[0]}];
    auto e = m[{r + 1, column_indices[1]}];
    auto f = m[{r + 1, column_indices[2]}];
    auto g = m[{r + 2, column_indices[0]}];
    auto h = m[{r + 2, column_indices[1]}];
    auto i = m[{r + 2, column_indices[2]}];
    // Cofactor expansion along the first of the three rows.
    return a * (e * i - f * h)
           - b * (d * i - f * g)
           + c * (d * h - e * g);
}
/**
 * @brief Computes the determinant of the NxN submatrix of @a m formed
 *  by its N bottom rows and the columns listed in @a column_indices.
 *
 * Performs Laplace (cofactor) expansion along the submatrix's top row,
 * which is row M - N of @a m.
 */
template <typename T, unsigned M, size_t N>
T get_submatrix_determinant(
    const Matrix<T, M, M>& m,
    const std::array<unsigned, N>& column_indices)
{
    static_assert(N > 3, "Matrix dimension must be greater than 3.");
    constexpr auto i = unsigned(M - N);
    // sub_indices holds the columns of the minor for position j, i.e.
    // all of column_indices except column_indices[j].
    std::array<unsigned, N - 1> sub_indices;
    std::copy(column_indices.begin() + 1, column_indices.end(),
              sub_indices.begin());
    auto determinant = T();
    for (unsigned j = 0; j < N; ++j)
    {
        if (j > 0)
            sub_indices[j - 1] = column_indices[j - 1];
        // BUG FIX: the expansion element must be read from the actual
        // column, column_indices[j], not from column j. The two only
        // coincide in the outermost call (where the indices are
        // 0..N-1), so determinants of 5x5 and larger matrices were
        // computed from the wrong elements.
        auto element = m[{i, column_indices[j]}];
        if (element != 0)
        {
            auto tmp = element * get_submatrix_determinant(m, sub_indices);
            // Cofactor signs alternate along the row.
            if (j % 2 == 0)
                determinant += tmp;
            else
                determinant -= tmp;
        }
    }
    return determinant;
}
}
/**
 * @brief Computes the determinant of the square matrix @a m by
 *  recursive cofactor expansion.
 *
 * The 2x2 and 3x3 cases are handled directly by the overloads below.
 */
template <typename T, unsigned N>
T get_determinant(const Matrix<T, N, N>& m)
{
    static_assert(N > 3, "Matrix dimension must be greater than 3.");
    // Start the expansion with all N columns, in order.
    std::array<unsigned, N> sub_indices;
    std::iota(sub_indices.begin(), sub_indices.end(), 0);
    return Details::get_submatrix_determinant(m, sub_indices);
}
/// @brief Determinant of a 3x3 matrix by cofactor expansion along the
///     first row.
template <typename T>
T get_determinant(const Matrix<T, 3, 3>& m)
{
    return m[{0, 0}] * (m[{1, 1}] * m[{2, 2}] - m[{1, 2}] * m[{2, 1}])
           + m[{0, 1}] * (m[{1, 2}] * m[{2, 0}] - m[{1, 0}] * m[{2, 2}])
           + m[{0, 2}] * (m[{1, 0}] * m[{2, 1}] - m[{1, 1}] * m[{2, 0}]);
}
/// @brief Determinant of a 2x2 matrix: ad - bc.
template <typename T>
T get_determinant(const Matrix<T, 2, 2>& m)
{
    const auto ad = m[{0, 0}] * m[{1, 1}];
    const auto bc = m[{0, 1}] * m[{1, 0}];
    return ad - bc;
}
}
<file_sep>/CMakeLists.txt
##****************************************************************************
## Copyright © 2015 <NAME>. All rights reserved.
## Created by <NAME> on 2015-12-17.
##
## This file is distributed under the Simplified BSD License.
## License text is included with the source distribution.
##****************************************************************************
cmake_minimum_required(VERSION 3.16)
project(Xyz VERSION 0.2.90)

set(CMAKE_CXX_STANDARD 17)

# Set XYZ_MASTER_PROJECT to ON if Xyz is the root level project.
if (NOT DEFINED XYZ_MASTER_PROJECT)
    if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
        set(XYZ_MASTER_PROJECT ON)
        message("XYZ version is ${Xyz_VERSION}")
    else ()
        set(XYZ_MASTER_PROJECT OFF)
    endif ()
endif ()

# Test option. Tests are only built by default when Xyz is the root
# project.
option(XYZ_BUILD_TEST "Build tests" ${XYZ_MASTER_PROJECT})

# Install option
option(XYZ_INSTALL "Generate the install target" ${XYZ_MASTER_PROJECT})

# The static archive contains only the two .cpp files; the headers are
# listed so that IDEs display them as part of the target.
add_library(Xyz STATIC
    include/Xyz/Clamp.hpp
    include/Xyz/ComplexApprox.hpp
    include/Xyz/Constants.hpp
    include/Xyz/CoordinateSystem.hpp
    include/Xyz/Face.hpp
    include/Xyz/FloatType.hpp
    include/Xyz/HalfEdge.hpp
    include/Xyz/InvertMatrix.hpp
    include/Xyz/Line.hpp
    include/Xyz/LineClipping.hpp
    include/Xyz/LineLineIntersection.hpp
    include/Xyz/LineSegment.hpp
    include/Xyz/Matrix.hpp
    include/Xyz/MatrixDeterminant.hpp
    include/Xyz/MatrixTransformations.hpp
    include/Xyz/Mesh.hpp
    include/Xyz/ProjectionMatrix.hpp
    include/Xyz/QuadraticEquation.hpp
    include/Xyz/RandomGenerator.hpp
    include/Xyz/Rectangle.hpp
    include/Xyz/SphericalPoint.hpp
    include/Xyz/Triangle.hpp
    include/Xyz/Utilities.hpp
    include/Xyz/Vector.hpp
    include/Xyz/Xyz.hpp
    include/Xyz/XyzException.hpp
    src/Xyz/Face.cpp
    src/Xyz/RandomGenerator.cpp
)

include(GNUInstallDirs)

# Consumers see include/ (or the installed include dir); src/ is only
# visible when building Xyz itself.
target_include_directories(Xyz
    PUBLIC
        $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
    PRIVATE
        ${CMAKE_CURRENT_SOURCE_DIR}/src
)

add_library(Xyz::Xyz ALIAS Xyz)

##
## Unit tests
##
if(XYZ_BUILD_TEST)
    enable_testing()
    add_subdirectory(tests/XyzTest)
    add_subdirectory(tests/CatchXyzTest)
endif()

##
## "Export" the current build tree and make it possible for other modules
## in the same build tree to locate it with find_package.
##
export(TARGETS Xyz
    NAMESPACE Xyz::
    FILE XyzConfig.cmake)

##
## Installation
##
if(XYZ_INSTALL)
    install(TARGETS Xyz
        EXPORT XyzConfig
        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
        ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
    )
    install(EXPORT XyzConfig
        FILE
            XyzConfig.cmake
        NAMESPACE
            Xyz::
        DESTINATION
            ${CMAKE_INSTALL_LIBDIR}/cmake/Xyz
    )
    install(
        DIRECTORY
            include/
        DESTINATION
            ${CMAKE_INSTALL_INCLUDEDIR}
    )
endif()

# Optional helper target: when LOCAL_SOURCE_PACKAGE_DIR is set (as a
# CMake variable or in the environment) and Xyz is the root project,
# add a target that archives HEAD into a versioned source tarball. The
# git commit count serves as the build number.
if (DEFINED LOCAL_SOURCE_PACKAGE_DIR OR DEFINED ENV{LOCAL_SOURCE_PACKAGE_DIR})
    if (NOT LOCAL_SOURCE_PACKAGE_DIR)
        set(LOCAL_SOURCE_PACKAGE_DIR $ENV{LOCAL_SOURCE_PACKAGE_DIR})
    endif()
    if (XYZ_MASTER_PROJECT)
        find_package(Git REQUIRED)
        execute_process(
            COMMAND ${GIT_EXECUTABLE} rev-list --count HEAD
            WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
            OUTPUT_VARIABLE CURRENT_GIT_REVISION
            OUTPUT_STRIP_TRAILING_WHITESPACE
        )
        add_custom_target(${PROJECT_NAME}_BUILD_SRC_TARBALL
            WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
            COMMAND ${GIT_EXECUTABLE} archive -v --format=tar.gz --prefix=${PROJECT_NAME}/ HEAD > ${LOCAL_SOURCE_PACKAGE_DIR}/${PROJECT_NAME}-${PROJECT_VERSION}.${CURRENT_GIT_REVISION}.tar.gz
            COMMAND echo Created ${LOCAL_SOURCE_PACKAGE_DIR}/${PROJECT_NAME}-${PROJECT_VERSION}.${CURRENT_GIT_REVISION}.tar.gz
        )
    endif()
endif ()
<file_sep>/include/Xyz/SimplexNoise.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-05-07.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <cstdint>
/**
 * @brief Generator of 3D simplex noise.
 *
 * NOTE(review): unlike the rest of the library, this class is not in
 * the Xyz namespace - confirm whether that is intentional.
 */
class SimplexNoise
{
public:
    /// Initializes the internal permutation table.
    SimplexNoise();
    /// @return a single octave of noise at (x, y, z).
    double simplex(double x, double y, double z);
    /// @return fractal noise: @a octaves octaves combined, with the
    ///     amplitude of successive octaves scaled by @a persistence.
    double simplex(double x, double y, double z,
                   int octaves, double persistence);
private:
    // Permutation table. Presumably the classic 256-entry table stored
    // twice so index wrap-around can be avoided - TODO confirm in the
    // implementation file.
    uint8_t permutation_[512];
};
<file_sep>/include/Xyz/Mesh.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-06-03.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <vector>
#include "Face.hpp"
#include "Vector.hpp"
namespace Xyz
{
template <typename T>
class Mesh
{
public:
using Vertex = Xyz::Vector<T, 3>;
Mesh() = default;
Mesh(std::vector<Vertex> vertexes, std::vector<Face> faces)
: vertexes_(std::move(vertexes)),
faces_(std::move(faces))
{
assign_face_ids(faces_);
}
[[nodiscard]]
const std::vector<Vertex>& vertexes() const
{
return vertexes_;
}
void add_vertex(const Vertex& v)
{
vertexes_.push_back(v);
}
void set_vertex(size_t n, const Vertex& v)
{
vertexes_[n] = v;
}
void set_vertexes(std::vector<Vertex> vertexes)
{
vertexes_ = std::move(vertexes);
}
[[nodiscard]]
const std::vector<Face>& faces() const
{
return faces_;
}
void add_face(const Face& face)
{
faces_.push_back(face);
faces_.back().set_id(FaceId(faces_.size() - 1));
}
void set_faces(std::vector<Face> faces)
{
faces_ = std::move(faces);
assign_face_ids(faces_);
}
Vertex normal(const Face& face) const
{
return get_unit(cross(vertexes_[face[1]] - vertexes_[face[0]],
vertexes_[face[2]] - vertexes_[face[1]]));
}
private:
void assign_face_ids(std::vector<Face>& faces)
{
FaceId n = 0;
std::for_each(faces.begin(), faces.end(),
[&n](auto& f) {f.set_id(n++);});
}
std::vector<Vertex> vertexes_;
std::vector<Face> faces_;
};
}
<file_sep>/include/Xyz/HalfEdge.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-06-03.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <algorithm>
#include <cstdint>
namespace Xyz
{
    using VertexId = uint32_t;
    using FaceId = uint32_t;
    using HalfEdgeId = uint32_t;

    // "All bits set" sentinels that mark unassigned ids.
    constexpr VertexId INVALID_VERTEX_ID = ~VertexId(0);
    constexpr FaceId INVALID_FACE_ID = ~FaceId(0);
    constexpr HalfEdgeId INVALID_HALF_EDGE_ID = ~HalfEdgeId(0);

    /**
     * @brief A directed edge between two vertexes, tagged with an id.
     */
    struct HalfEdge
    {
    public:
        /// Creates a half-edge with every id set to its invalid
        /// sentinel.
        HalfEdge()
            : HalfEdge(INVALID_VERTEX_ID, INVALID_VERTEX_ID,
                       INVALID_HALF_EDGE_ID)
        {}

        HalfEdge(VertexId a, VertexId b, HalfEdgeId id)
            : vertex_ids_{a, b},
              id_(id)
        {}

        /// Returns the id of end point @a i (0 or 1). No bounds
        /// checking is done.
        [[nodiscard]]
        VertexId operator[](size_t i) const
        {
            return vertex_ids_[i];
        }

        VertexId& operator[](size_t i)
        {
            return vertex_ids_[i];
        }

        [[nodiscard]]
        HalfEdgeId id() const
        {
            return id_;
        }

        void set_id(HalfEdgeId id)
        {
            id_ = id;
        }
    private:
        VertexId vertex_ids_[2];
        HalfEdgeId id_;
    };

    /// @return true if @a a and @a b connect the same vertexes in the
    ///     same direction; the ids are ignored.
    inline bool are_equal(const HalfEdge& a, const HalfEdge& b)
    {
        return a[0] == b[0] && a[1] == b[1];
    }

    /// @return true if @a a and @a b connect the same vertexes in
    ///     opposite directions.
    inline bool are_opposites(const HalfEdge& a, const HalfEdge& b)
    {
        return a[0] == b[1] && a[1] == b[0];
    }

    /// Two half-edges are equal when both end points and the id match.
    inline bool operator==(const HalfEdge& a, const HalfEdge& b)
    {
        return are_equal(a, b) && a.id() == b.id();
    }

    inline bool operator!=(const HalfEdge& a, const HalfEdge& b)
    {
        return !(a == b);
    }

    /// Orders half-edges primarily by their undirected (sorted) vertex
    /// pair, with the edge's own direction as the final tie-breaker.
    /// The id does not participate in the ordering.
    inline bool operator<(const HalfEdge& a, const HalfEdge& b)
    {
        const VertexId key_a[2] = {std::min(a[0], a[1]), std::max(a[0], a[1])};
        const VertexId key_b[2] = {std::min(b[0], b[1]), std::max(b[0], b[1])};
        if (key_a[0] != key_b[0])
            return key_a[0] < key_b[0];
        if (key_a[1] != key_b[1])
            return key_a[1] < key_b[1];
        return a[0] < b[0];
    }
}
<file_sep>/misc/removed/Xyz/Generators/RegularPolygonGenerator.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-23.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Xyz/Vector.hpp"
namespace Xyz
{
/**
 * @brief Generates the corner points of a regular polygon.
 *
 * Configured with fluent setters; generate() produces points evenly
 * spaced on a circle with the configured radius and center, starting
 * at the configured angle.
 */
class RegularPolygonGenerator
{
public:
    double radius() const
    {
        return m_Radius;
    }
    RegularPolygonGenerator& setRadius(double radius)
    {
        m_Radius = radius;
        return *this;
    }
    /// The angle (in radians) of the first generated point.
    double angle() const
    {
        return m_Angle;
    }
    RegularPolygonGenerator& setAngle(double angle)
    {
        m_Angle = angle;
        return *this;
    }
    const Vector2d& center() const
    {
        return m_Center;
    }
    RegularPolygonGenerator& setCenter(const Vector2d& center)
    {
        m_Center = center;
        return *this;
    }
    size_t numberOfPoints() const
    {
        return m_NumberOfPoints;
    }
    RegularPolygonGenerator& setNumberOfPoints(size_t numberOfPoints)
    {
        m_NumberOfPoints = numberOfPoints;
        return *this;
    }
    /// When closed, the first point is repeated at the end of the
    /// generated sequence.
    bool isClosed() const
    {
        return m_IsClosed;
    }
    RegularPolygonGenerator& setIsClosed(bool isClosed)
    {
        m_IsClosed = isClosed;
        return *this;
    }
    /// @return the polygon's corner points, or an empty vector when
    ///     fewer than three points have been requested.
    template <typename T>
    std::vector<Vector<T, 2>> generate() const
    {
        if (m_NumberOfPoints < 3)
            return {};
        std::vector<Vector<T, 2>> result;
        result.reserve(m_NumberOfPoints + (m_IsClosed ? 1 : 0));
        for (size_t i = 0; i < m_NumberOfPoints; ++i)
        {
            // Points are evenly distributed along the full circle,
            // starting at m_Angle.
            auto angle = m_Angle + 2 * i * Constants<T>::PI / m_NumberOfPoints;
            result.push_back({T(m_Center[0] + m_Radius * cos(angle)),
                              T(m_Center[1] + m_Radius * sin(angle))});
        }
        if (m_IsClosed && !result.empty())
            result.push_back(result.front());
        return result;
    }
private:
    double m_Radius = 1.0;
    double m_Angle = 0.0;
    Vector2d m_Center;
    size_t m_NumberOfPoints = 4;
    bool m_IsClosed = false;
};
}
<file_sep>/include/Xyz/Approx.hpp
//****************************************************************************
// Copyright © 2021 <NAME>. All rights reserved.
// Created by <NAME> on 2021-03-23.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <algorithm>
#include <cstdlib>
#include <limits>
#include <type_traits>
namespace Xyz
{
/**
 * @brief A floating-point value paired with a comparison margin.
 *
 * Comparing a number v against Approx(value, margin) treats the two
 * as equal when they differ by at most margin; the ordering operators
 * below only report < or > when the difference exceeds the margin.
 */
template <typename T,
          std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
struct Approx
{
    constexpr static T DEFAULT_MARGIN = 100 * std::numeric_limits<T>::epsilon();
    constexpr explicit Approx(T value, T margin = DEFAULT_MARGIN) noexcept
        : value(value),
          margin(margin)
    {}
    T value;
    T margin;
};
// Equality: v == m holds when |v - m.value| <= m.margin.
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator==(U v, Approx<T> m)
{
    return v + m.margin >= m.value && m.value + m.margin >= v;
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator==(Approx<T> m, U v)
{
    return v + m.margin >= m.value && m.value + m.margin >= v;
}
// When two Approx values are compared, the larger of their margins is
// used on both sides.
template <typename T, typename U>
constexpr bool operator==(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) == b.value;
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator!=(U v, Approx<T> m)
{
    return !(v == m);
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator!=(Approx<T> m, U v)
{
    return !(m == v);
}
template <typename T, typename U>
constexpr bool operator!=(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) != b.value;
}
// Ordering: the Approx side only counts as smaller/greater when the
// difference exceeds the margin.
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator<(Approx<T> m, U v)
{
    return m.value + m.margin < v;
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator<(U v, Approx<T> m)
{
    return m.value - m.margin > v;
}
template <typename T, typename U>
constexpr bool operator<(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) < b.value;
}
// The > operators are defined in terms of the < operators above.
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator>(Approx<T> m, U v)
{
    return v < m;
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator>(U v, Approx<T> m)
{
    return m < v;
}
template <typename T, typename U>
constexpr bool operator>(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) > b.value;
}
// <= and >= are the negations of the strict comparisons, so values
// within the margin compare as both <= and >=.
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator<=(Approx<T> m, U v)
{
    return !(v < m);
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator<=(U v, Approx<T> m)
{
    return !(m < v);
}
template <typename T, typename U>
constexpr bool operator<=(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) <= b.value;
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator>=(Approx<T> m, U v)
{
    return !(m < v);
}
template <typename T, typename U,
          std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator>=(U v, Approx<T> m)
{
    return !(v < m);
}
template <typename T, typename U>
constexpr bool operator>=(Approx<T> a, Approx<U> b)
{
    using V = decltype(T() + U());
    return Approx<V>(a.value, std::max<V>(a.margin, b.margin)) >= b.value;
}
}
<file_sep>/tests/XyzTest/XyzTestMain.cpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 18.12.2015
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Ytest/YtestMain.hpp"
<file_sep>/include/Xyz/LineClipping.hpp
//****************************************************************************
// Copyright © 2017 <NAME>. All rights reserved.
// Created by <NAME> on 24.04.2017.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <utility>
#include "LineSegment.hpp"
#include "Rectangle.hpp"
namespace Xyz
{
// Cohen-Sutherland outcodes: one bit per rectangle edge, set when the
// point lies outside that edge.
constexpr unsigned OUTCODE_INSIDE = 0;
constexpr unsigned OUTCODE_LEFT = 0b0001;
constexpr unsigned OUTCODE_RIGHT = 0b0010;
constexpr unsigned OUTCODE_BOTTOM = 0b0100;
constexpr unsigned OUTCODE_TOP = 0b1000;

/**
 * @brief Returns the Cohen-Sutherland outcode of @a point relative to
 *  @a rectangle: OUTCODE_INSIDE, or the OR of at most one horizontal
 *  and one vertical flag.
 */
template <typename T>
unsigned compute_clipping_outcode(const Rectangle<T>& rectangle,
                                  const Vector<T, 2>& point)
{
    auto [x, y] = point;
    auto [x0, y0] = get_min(rectangle);
    auto [x1, y1] = get_max(rectangle);
    unsigned code = OUTCODE_INSIDE;
    // Combine the flags with |= rather than mixing = and +=. The
    // result is the same because the bits are disjoint, but |= makes
    // the bit-mask intent explicit and is robust against future
    // changes to the flag values.
    if (x1 < x)
        code |= OUTCODE_RIGHT;
    else if (x < x0)
        code |= OUTCODE_LEFT;
    if (y1 < y)
        code |= OUTCODE_TOP;
    else if (y < y0)
        code |= OUTCODE_BOTTOM;
    return code;
}
/** @brief Returns the relative start and end positions of @a line
  * inside @a rectangle.
  *
  * Uses the Cohen-Sutherland algorithm to compute the relative positions.
  *
  * @tparam T a numeric type.
  * @param rectangle the clipping rectangle.
  * @param line the line that will be clipped.
  * @return the relative start and end positions of the part of @a line
  *     that lies inside @a rectangle. Both positions will be between
  *     0 and 1, unless @a line is completely outside @a rectangle in
  *     which case both positions are -1.
  */
template <typename T>
std::pair<double, double> get_clipping_positions(
    const Rectangle<T>& rectangle, const LineSegment<T, 2>& line)
{
    auto startCode = compute_clipping_outcode(rectangle, line.start())
;
    auto endCode = compute_clipping_outcode(rectangle, line.end());
    double tStart = 0.0, tEnd = 1.0;
    for (;;)
    {
        // Both end points inside: done.
        if (!(startCode | endCode))
            return {tStart, tEnd};
        // Both end points outside the same edge: no visible part.
        if (startCode & endCode)
            return {-1.0, -1.0};
        auto start = line.start();
        auto vector = line.end() - line.start();
        auto bottomLeft = get_min(rectangle);
        auto topRight = get_max(rectangle);
        // Pick an end point that is outside and move it to the edge
        // it violates; repeat until both codes are resolved.
        unsigned code = startCode ? startCode : endCode;
        double t;
        if (code & OUTCODE_TOP)
            t = (get<1>(topRight) - get<1>(start)) / get<1>(vector);
        else if (code & OUTCODE_BOTTOM)
            t = (get<1>(bottomLeft) - get<1>(start)) / get<1>(vector);
        else if (code & OUTCODE_LEFT)
            t = (get<0>(bottomLeft) - get<0>(start)) / get<0>(vector);
        else
            t = (get<0>(topRight) - get<0>(start)) / get<0>(vector);
        auto point = start + t * vector;
        if (code == startCode)
        {
            tStart = t;
            startCode = compute_clipping_outcode(rectangle, point);
        }
        else
        {
            tEnd = t;
            endCode = compute_clipping_outcode(rectangle, point);
        }
    }
}
}
<file_sep>/include/Xyz/Clamp.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-17.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <algorithm>
#include <cmath>
#include "Constants.hpp"
namespace Xyz
{
/**
 * @brief Returns @a value limited to the range [@a min, @a max].
 *
 * When max < min, max wins (the result is max), matching the original
 * min/max composition.
 */
template <typename T>
[[nodiscard]] T clamp(T value, T min, T max)
{
    const T at_least_min = std::max(min, value);
    return std::min(max, at_least_min);
}
/**
 * @brief Linearly rescales the values in [@a first, @a last) so that
 *  their extremes map to @a new_min and @a new_max.
 *
 * If all values are equal, they are all set to @a new_min. An empty
 * range is left untouched.
 */
template <typename It, typename T>
void scale_range(It first, It last, T new_min, T new_max)
{
    if (first == last)
        return;
    // BUG FIX: std::minmax_element returns a pair of iterators, which
    // cannot be dereferenced as a unit; the original
    // `auto [cur_min, cur_max] = *its;` did not compile when the
    // template was instantiated. Bind the pair and copy the extreme
    // values before writing through the range below.
    auto [min_it, max_it] = std::minmax_element(first, last);
    auto cur_min = *min_it;
    auto cur_max = *max_it;
    auto cur_range = cur_max - cur_min;
    if (cur_range == 0)
    {
        // Degenerate case: every value is the same.
        std::fill(first, last, new_min);
        return;
    }
    auto new_range = new_max - new_min;
    for (auto it = first; it != last; ++it)
        *it = new_min + (*it - cur_min) * new_range / cur_range;
}
template <typename It, typename T>
void clamp_range(It first, It last, T newMin, T new_max)
{
for (auto it = first; it != last; ++it)
*it = Xyz::clamp(*it, newMin, new_max);
}
}
<file_sep>/tests/XyzTest/test_Matrix.cpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 18.12.2015
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Matrix.hpp"
#include "Ytest/Ytest.hpp"
#include "Xyz/Vector.hpp"
//#include "Xyz/Utilities/Utilities.hpp"
namespace
{
    using namespace Xyz;

    // Element access via RowCol on an identity matrix.
    void test_basics()
    {
        auto mat = Matrix3D{1, 0, 0,
                            0, 1, 0,
                            0, 0, 1};
        Y_EQUAL(mat[RowCol(0, 0)], 1.0);
        Y_EQUAL(mat[RowCol(0, 1)], 0.0);
        Y_EQUAL(mat[RowCol(1, 1)], 1.0);
    }

    // transpose() returns a transposed copy; transpose_inplace()
    // transposes its argument.
    void test_transpose()
    {
        auto mat1 = Matrix4D{0, 1, 2, 3,
                             4, 5, 6, 7,
                             8, 9, 0, 1,
                             2, 3, 4, 5};
        auto mat1t = Matrix4D{0, 4, 8, 2,
                              1, 5, 9, 3,
                              2, 6, 0, 4,
                              3, 7, 1, 5};
        Y_EQUAL(transpose(mat1), mat1t);
        transpose_inplace(mat1t);
        Y_EQUAL(mat1, mat1t);
    }

    void test_matrix_matrix_addition()
    {
        auto m1 = Matrix3I{1, 2, 3,
                           4, 5, 6,
                           7, 8, 9};
        auto m2 = Matrix3I{ 1, -1, 1,
                           -1, 1, -1,
                            1, -1, 1};
        // Renamed from "product": this is the expected *sum*.
        auto sum = Matrix3I{ 2, 1, 4,
                             3, 6, 5,
                             8, 7, 10};
        Y_EQUAL(m1 + m2, sum);
    }

    void test_matrix_matrix_subtraction()
    {
        auto m1 = Matrix3I{1, 2, 3,
                           4, 5, 6,
                           7, 8, 9};
        auto m2 = Matrix3I{ 1, -1, 1,
                           -1, 1, -1,
                            1, -1, 1};
        // Renamed from "product": this is the expected *difference*.
        auto difference = Matrix3I{0, 3, 2,
                                   5, 4, 7,
                                   6, 9, 8};
        Y_EQUAL(m1 - m2, difference);
    }

    void test_matrix_matrix_multiplication()
    {
        auto m1 = Matrix2I{1, 2,
                           3, 4};
        auto m2 = Matrix2I{4, 3,
                           1, 2};
        auto product = Matrix2I{ 6, 7,
                                16, 17};
        Y_EQUAL(m1 * m2, product);
    }

    // A matrix can be multiplied by a vector on either side.
    void test_matrix_vector_multiplication()
    {
        auto m = Matrix3I{1, 2, 3,
                          4, 5, 6,
                          7, 8, 9};
        auto v = Vector3I{1, 2, 3};
        auto product1 = Vector3I{14, 32, 50};
        auto product2 = Vector3I{30, 36, 42};
        Y_EQUAL(m * v, product1);
        Y_EQUAL(v * m, product2);
    }

    // Row and column indexes wrap around the source matrix's edges
    // (the expected values below come from rows 3,0,1 and columns 2,3,0).
    void test_make_submatrix()
    {
        Matrix4I m{1, 2, 3, 4,
                   5, 6, 7, 8,
                   9, 0, 1, 2,
                   3, 4, 5, 6};
        auto s = make_submatrix<3, 3>(m, 3, 2);
        Matrix3I e{5, 6, 3,
                   3, 4, 1,
                   7, 8, 5};
        Y_EQUAL(s, e);
    }

    void test_identity()
    {
        auto m1 = make_identity_matrix<int, 4>();
        Y_EQUAL(m1[RowCol(0, 0)], 1);
        Y_EQUAL(m1[RowCol(0, 1)], 0);
    }

    Y_SUBTEST("Fundamentals",
              test_basics,
              test_transpose,
              test_matrix_matrix_addition,
              test_matrix_matrix_subtraction,
              test_matrix_matrix_multiplication,
              test_matrix_vector_multiplication,
              test_make_submatrix,
              test_identity);
}
<file_sep>/include/Xyz/Line.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 2016-02-07.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "LineSegment.hpp"
#include "Vector.hpp"
namespace Xyz
{
template <typename T, unsigned N>
class Line
{
public:
Line() = default;
Line(const Vector<T, N>& point, const Vector<T, N>& vector)
: m_point(point),
m_vector(vector)
{}
const Vector<T, N>& point() const
{
return m_point;
}
void set_point(const Vector<T, N>& point)
{
m_point = point;
}
const Vector<T, N>& vector() const
{
return m_vector;
}
void set_vector(const Vector<T, N>& vector)
{
m_vector = vector;
}
private:
Vector<T, N> m_point;
Vector<T, N> m_vector;
};
template <typename T, unsigned N>
const Vector<T, N>& get_point(const Line<T, N>& line)
{
return line.point();
}
template <typename T, unsigned N>
const Vector<T, N>& get_vector(const Line<T, N>& line)
{
return line.vector();
}
template <typename T, unsigned N>
std::ostream& operator<<(std::ostream& os, const Line<T, N>& line)
{
return os << "{\"vertex\": " << get_point(line)
<< ", \"vector\": " << get_vector(line) << "}";
}
template <typename T, unsigned N>
Line<T, N> make_line(const Vector<T, N>& point,
const Vector<T, N>& vector)
{
return Line<T, N>(point, vector);
}
template <typename T, unsigned N>
Line<T, N> make_line(const LineSegment<T, N>& line_segment)
{
return make_line(get_start(line_segment), get_vector(line_segment));
}
}
<file_sep>/tests/XyzTest/test_Intersections.cpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 13.02.2016
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Xyz.hpp"
#include "Ytest/Ytest.hpp"
namespace {
    using namespace Xyz;

    // Convenience wrapper: builds a 2D line segment from four coordinates.
    template <typename T>
    Xyz::LineSegment<T, 2> make_line_segment(T x1, T y1, T x2, T y2)
    {
        return Xyz::make_line_segment(make_vector2(x1, y1), make_vector2(x2, y2));
    }

    // Two crossing segments: the intersection lies halfway along the
    // first segment and three quarters along the second.
    static void test_get_intersection_factors()
    {
        const auto a = make_line_segment<int>(-4, -2, 4, 2);
        const auto b = make_line_segment<int>(3, -9, -1, 3);
        auto [relationship, position_a, position_b] =
            get_intersection_positions(a, b, 1e-10);
        Y_EQUAL(relationship, LineRelationship::INTERSECTING);
        Y_EQUAL(position_a, 0.5);
        Y_EQUAL(position_b, 0.75);
    }

    Y_SUBTEST("Geometry",
              test_get_intersection_factors
    );
}
<file_sep>/include/Xyz/Rectangle.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 16.01.2012.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <vector>
#include "Vector.hpp"
namespace Xyz
{
    /**
     * @brief An axis-aligned rectangle given by its origin and size.
     *
     * The size components may be negative, in which case the rectangle
     * extends in the negative direction from the origin; use normalize()
     * to obtain an equivalent rectangle with non-negative size.
     */
    template <typename T>
    class Rectangle
    {
    public:
        Rectangle() = default;

        Rectangle(const Vector<T, 2>& origin, const Vector<T, 2>& size)
            : origin(origin), size(size)
        {}

        Vector<T, 2> origin;
        Vector<T, 2> size;
    };

    template <typename T>
    [[nodiscard]]
    bool operator==(const Rectangle<T>& a, const Rectangle<T>& b)
    {
        return a.origin == b.origin && a.size == b.size;
    }

    template <typename T>
    [[nodiscard]]
    bool operator!=(const Rectangle<T>& a, const Rectangle<T>& b)
    {
        // Bug fix: the size comparison previously used ==, which made
        // a != b false for rectangles with equal origins but different
        // sizes (and inconsistent with operator==).
        return a.origin != b.origin || a.size != b.size;
    }

    // Note: deliberately not [[nodiscard]]; stream inserters are normally
    // used as statements, discarding the returned reference.
    template <typename T>
    std::ostream& operator<<(std::ostream& os, const Rectangle<T>& rect)
    {
        return os << '{' << rect.origin << ", " << rect.size << "}";
    }

    /** @brief Returns true if either size component of @a rect is zero. */
    template <typename T>
    [[nodiscard]]
    bool is_empty(const Rectangle<T>& rect)
    {
        return rect.size[0] == 0 || rect.size[1] == 0;
    }

    /** @brief Returns the origin corner of @a rect. */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_bottom_left(const Rectangle<T>& rect)
    {
        return rect.origin;
    }

    /** @brief Returns the corner at origin + (width, 0). */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_bottom_right(const Rectangle<T>& rect)
    {
        return rect.origin + Vector<T, 2>{rect.size[0], 0};
    }

    /** @brief Returns the corner at origin + (0, height). */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_top_left(const Rectangle<T>& rect)
    {
        return rect.origin + Vector<T, 2>{0, rect.size[1]};
    }

    /** @brief Returns the corner at origin + size. */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_top_right(const Rectangle<T>& rect)
    {
        return rect.origin + rect.size;
    }

    /**
     * @brief Returns the corner with the smallest coordinates.
     *
     * Takes negative size components into account.
     */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_min(const Rectangle<T>& rect)
    {
        auto [w, h] = rect.size;
        if (0 <= w && 0 <= h)
            return rect.origin;
        auto [x, y] = rect.origin;
        if (0 <= w)
            return {x, y + h};
        else if (0 <= h)
            return {x + w, y};
        else
            return rect.origin + rect.size;
    }

    /**
     * @brief Returns the corner with the largest coordinates.
     *
     * Takes negative size components into account.
     */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_max(const Rectangle<T>& rect)
    {
        auto [w, h] = rect.size;
        if (0 <= w && 0 <= h)
            return rect.origin + rect.size;
        auto [x, y] = rect.origin;
        if (0 <= w)
            return {x + w, y};
        else if (0 <= h)
            return {x, y + h};
        else
            return rect.origin;
    }

    /** @brief Returns the center of @a rect. */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_center(const Rectangle<T>& rect)
    {
        return rect.origin + rect.size / 2;
    }

    /** @brief Moves @a rect so that its center is at @a center. */
    template <typename T>
    void set_center(Rectangle<T>& rect, const Vector<T, 2>& center)
    {
        rect.origin = center - rect.size / 2;
    }

    /** @brief Returns a copy of @a rect translated by @a delta. */
    template <typename T>
    [[nodiscard]]
    Rectangle<T> offset(Rectangle<T> rect, const Vector<T, 2>& delta)
    {
        rect.origin += delta;
        return rect;
    }

    /**
     * @brief Returns a rectangle covering the same area as @a rectangle,
     *  but with non-negative size components.
     */
    template <typename T>
    [[nodiscard]]
    Rectangle<T> normalize(const Rectangle<T>& rectangle)
    {
        auto [x, y] = rectangle.origin;
        auto [w, h] = rectangle.size;
        if (w < 0)
        {
            x += w;
            w = -w;
        }
        if (h < 0)
        {
            y += h;
            h = -h;
        }
        return Rectangle<T>({x, y}, {w, h});
    }

    using RectangleI = Rectangle<int>;
    using RectangleF = Rectangle<float>;
    using RectangleD = Rectangle<double>;
}
<file_sep>/include/Xyz/XyzException.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-07-20.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <stdexcept>
#include <string>
namespace Xyz
{
    /**
     * @brief The exception class used by the Xyz library to report
     *  errors.
     */
    class XyzException : public std::runtime_error
    {
    public:
        // Inherit std::runtime_error's constructors (message as
        // std::string or const char*).
        using std::runtime_error::runtime_error;
    };
}
// Innermost level: builds the "file:line: message" string by literal
// concatenation. #line stringifies the (already expanded) line number.
#define _XYZ_THROW_3(file, line, msg) \
    throw ::Xyz::XyzException(file ":" #line ": " msg)
// Intermediate level: forces __LINE__ to expand to its numeric value
// before _XYZ_THROW_3 stringifies it.
#define _XYZ_THROW_2(file, line, msg) \
    _XYZ_THROW_3(file, line, msg)
// Throws XyzException with the current file name and line number
// prepended to msg. msg must be a string literal.
#define XYZ_THROW(msg) \
    _XYZ_THROW_2(__FILE__, __LINE__, msg)
<file_sep>/tests/XyzTest/CMakeLists.txt
##****************************************************************************
## Copyright © 2015 <NAME>. All rights reserved.
## Created by <NAME> on 2015-12-18.
##
## This file is distributed under the Simplified BSD License.
## License text is included with the source distribution.
##****************************************************************************
cmake_minimum_required(VERSION 3.13)
# Download the Ytest unit test framework at configure time.
include(FetchContent)
FetchContent_Declare(ytest
    GIT_REPOSITORY "https://github.com/jebreimo/Ytest.git"
    GIT_TAG v0.1.4)
FetchContent_MakeAvailable(ytest)
# A single executable containing all the Xyz unit tests.
add_executable(XyzTest
    XyzTestMain.cpp
    YtestUtilities.hpp
    test_CoordinateSystem.cpp
    test_Intersections.cpp
    test_InvertMatrix.cpp
    test_LineClipping.cpp
    test_Matrix.cpp
    test_MatrixDeterminant.cpp
    test_Projections.cpp
    test_Transforms.cpp
    test_Triangle.cpp
    test_Vector.cpp
    )
target_link_libraries(XyzTest
    Xyz::Xyz
    Ytest::Ytest)
# Register the executable with CTest so `ctest` runs it.
add_test(NAME XyzTest COMMAND XyzTest)
<file_sep>/include/Xyz/Constants.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-17.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <cfloat>
#include <climits>
#include <cstdint>
namespace Xyz
{
    /**
     * @brief Numeric constants used throughout Xyz.
     *
     * The primary template provides double precision values; the float
     * specialization provides single precision equivalents.
     */
    template <typename T>
    struct Constants
    {
        constexpr static double PI = 3.141592653589793;
        // Default tolerance for approximate floating point comparisons.
        constexpr static double DEFAULT_MARGIN = DBL_EPSILON * 100.0;
    };

    template <>
    struct Constants<float>
    {
        constexpr static float PI = 3.14159265f;
        // Use a float literal so the whole expression stays in single
        // precision (100.0 would promote the computation to double).
        constexpr static float DEFAULT_MARGIN = FLT_EPSILON * 100.0f;
    };
}
<file_sep>/tests/XyzTest/test_MatrixDeterminant.cpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-07-31.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/MatrixDeterminant.hpp"
#include "Ytest/Ytest.hpp"
namespace
{
using namespace Xyz;
void test_get_determinant3()
{
Matrix3I m {1, 4, 3,
2, 1, 5,
3, 2, 1};
Y_EQUAL(get_determinant(m), 46);
}
void test_get_determinant4()
{
Matrix4I m{1, -2, 3, 2,
2, 3, 1, -1,
3, 3, 3, 3,
-1, 4, 2, 1};
Y_EQUAL(get_determinant(m), -141);
}
Y_TEST(test_get_determinant3,
test_get_determinant4);
}
<file_sep>/include/Xyz/RandomGenerator.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-04-06.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <limits>
#include <random>
namespace Xyz
{
    /**
     * @brief Returns the process-wide default random engine.
     *
     * Defined in the accompanying source file.
     */
    std::default_random_engine& get_random_engine();

    /**
     * @brief Generates uniformly distributed integers in the closed
     *  range [min, max].
     */
    template <typename IntT>
    class RandomIntGenerator
    {
    public:
        /**
         * @brief Creates a generator that uses the engine returned by
         *  get_random_engine().
         */
        explicit RandomIntGenerator(
            IntT min = 0,
            IntT max = std::numeric_limits<IntT>::max())
            : m_engine(get_random_engine()), m_dist(min, max)
        {}

        /**
         * @brief Creates a generator that uses @a engine.
         *
         * Bug fix: this constructor previously ignored @a engine and
         * initialized m_engine from get_random_engine() instead.
         */
        explicit RandomIntGenerator(
            std::default_random_engine& engine,
            IntT min = 0,
            IntT max = std::numeric_limits<IntT>::max())
            : m_engine(engine), m_dist(min, max)
        {}

        /** @brief Returns the next random number. */
        IntT operator()()
        {
            return m_dist(m_engine);
        }

    private:
        std::default_random_engine& m_engine;
        std::uniform_int_distribution<IntT> m_dist;
    };

    /**
     * @brief Generates uniformly distributed real numbers in the closed
     *  range [min, max].
     *
     * std::uniform_real_distribution produces values in [a, b); passing
     * nextafter(max, ...) as b makes @a max itself attainable.
     */
    template <typename RealT>
    class RandomRealGenerator
    {
    public:
        /**
         * @brief Creates a generator that uses the engine returned by
         *  get_random_engine().
         */
        explicit RandomRealGenerator(RealT min = 0.0, RealT max = 1.0)
            : m_engine(get_random_engine()),
              m_dist(min,
                     std::nextafter(max, std::numeric_limits<RealT>::max()))
        {}

        /**
         * @brief Creates a generator that uses @a engine.
         *
         * Bug fix: this constructor previously ignored @a engine and
         * initialized m_engine from get_random_engine() instead.
         */
        explicit RandomRealGenerator(std::default_random_engine& engine,
                                     RealT min = 0.0,
                                     RealT max = 1.0)
            : m_engine(engine),
              m_dist(min,
                     std::nextafter(max, std::numeric_limits<RealT>::max()))
        {}

        /** @brief Returns the next random number. */
        RealT operator()()
        {
            return m_dist(m_engine);
        }

    private:
        std::default_random_engine& m_engine;
        std::uniform_real_distribution<RealT> m_dist;
    };
}
<file_sep>/install.sh
#!/bin/bash
# Builds and installs both the Release and the Debug configuration of Xyz.
# Build directories (release.Xyz, debug.Xyz) are created in the current
# working directory if they don't already exist.

# Stop at the first failed command, so e.g. a failed cd or cmake run
# doesn't lead to building in the wrong directory or installing a stale
# build.
set -e

# Absolute path of the directory containing this script (the source dir).
XYZ_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

# build_and_install <build-dir> <build-type>
# Configures, builds and installs a single configuration.
build_and_install() {
    mkdir -p "$1"
    cd "$1"
    cmake -DCMAKE_BUILD_TYPE="$2" "$XYZ_DIR"
    make install
    cd ..
}

build_and_install release.Xyz Release
build_and_install debug.Xyz Debug
<file_sep>/misc/removed/Xyz/Geometry/_LineString.hpp
#ifndef JEB_MATH_LINESTRING_HPP
#define JEB_MATH_LINESTRING_HPP
#include <iosfwd>
#include <vector>
#include "Xyz/LineSegment.hpp"
// A "line string" is a std::vector of points; every consecutive pair of
// points forms one line segment. This header only declares the
// functions; the definitions are in LineString-impl.hpp.
namespace JEBMath {
    // Returns the i'th segment of the line string.
    template <typename T, unsigned N>
    LineSegment<T, N> getSegment(const std::vector<Vector<T, N>>& lineString,
                                 size_t i);
    // Returns the first segment of the line string.
    template <typename T, unsigned N>
    LineSegment<T, N> getFirstSegment(
            const std::vector<Vector<T, N>>& lineString);
    // Returns the last segment of the line string.
    template <typename T, unsigned N>
    LineSegment<T, N> getLastSegment(
            const std::vector<Vector<T, N>>& lineString);
    // Removes the i'th segment from the line string.
    template <typename T, unsigned N>
    void removeSegment(std::vector<Vector<T, N>>& lineString, size_t i);
    // Returns the number of segments (one less than the number of points
    // -- TODO confirm how an empty line string is handled).
    template <typename T, unsigned N>
    size_t getSegmentCount(const std::vector<Vector<T, N>>& lineString);
    // Returns the (min, max) corners of the line string's bounding box.
    template <typename T, unsigned N>
    std::pair<Vector<T, N>, Vector<T, N>> getBoundingBox(
            const std::vector<Vector<T, N>>& lineString);
    template <typename T, unsigned N>
    std::ostream& operator<<(std::ostream& os,
                             const std::vector<Vector<T, N>>& ls);
}
#include "LineString-impl.hpp"
#endif
<file_sep>/tests/XyzTest/test_Transforms.cpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 18.12.2015
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Xyz.hpp"
#include "YtestUtilities.hpp"
namespace {
    // Rotating (1, 1, 1) 90 degrees about the z axis maps it to (-1, 1, 1).
    void test_rotate_z()
    {
        const auto rotation = Xyz::rotate_z(Xyz::to_radians(90.0));
        const auto result = rotation * Xyz::make_vector4<double>(1, 1, 1, 1);
        Y_EQUIVALENT(result, Xyz::make_vector4<double>(-1, 1, 1, 1), 1e-10);
    }

    // Translating (1, 1, 1) by (1, 2, 3) gives (2, 3, 4).
    void test_translate4()
    {
        const auto translation = Xyz::translate4(1.0, 2.0, 3.0);
        const auto result = translation * Xyz::make_vector4<double>(1, 1, 1, 1);
        Y_EQUAL(result, Xyz::make_vector4<double>(2, 3, 4, 1));
    }

    // Translate (1, 1, 1) by (1, 2, 3) to (2, 3, 4), then rotate 90
    // degrees about z: (2, 3) -> (-3, 2).
    void test_rotate_z_and_transposed_translate4()
    {
        const auto combined = multiply_transposed(
            Xyz::rotate_z(Xyz::to_radians(90.0)),
            Xyz::transposed_translate4<double>(1, 2, 3));
        const auto result = combined * Xyz::make_vector4<double>(1, 1, 1, 1);
        Y_EQUAL(result, Xyz::make_vector4<double>(-3, 2, 4, 1));
    }

    Y_SUBTEST("Fundamentals",
              test_rotate_z,
              test_translate4,
              test_rotate_z_and_transposed_translate4);
}
<file_sep>/include/Xyz/Face.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-06-03.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "HalfEdge.hpp"
namespace Xyz
{
    /**
     * @brief A triangular face: three vertex ids plus the face's own id.
     *
     * The vertex ids can be iterated with begin()/end().
     */
    struct Face
    {
    public:
        // NOTE(review): the vertex slots are initialized with
        // INVALID_FACE_ID rather than an invalid *vertex* id -- confirm
        // in HalfEdge.hpp that VertexId and FaceId share the same
        // invalid sentinel value.
        Face()
            : vertex_ids_{INVALID_FACE_ID, INVALID_FACE_ID, INVALID_FACE_ID},
              id_(INVALID_FACE_ID)
        {}
        // Creates a face with the given vertices and an invalid face id.
        Face(VertexId a, VertexId b, VertexId c)
            : vertex_ids_{a, b, c},
              id_(INVALID_FACE_ID)
        {}
        Face(VertexId a, VertexId b, VertexId c, FaceId faceId)
            : vertex_ids_{a, b, c},
              id_(faceId)
        {}
        // Returns the i'th vertex id (i must be 0, 1 or 2; unchecked).
        [[nodiscard]]
        VertexId operator[](size_t i) const
        {
            return vertex_ids_[i];
        }
        VertexId& operator[](size_t i)
        {
            return vertex_ids_[i];
        }
        [[nodiscard]]
        FaceId id() const
        {
            return id_;
        }
        void set_id(FaceId id)
        {
            id_ = id;
        }
        // begin()/end() iterate over the three vertex ids.
        [[nodiscard]]
        VertexId* begin()
        {
            return vertex_ids_;
        }
        [[nodiscard]]
        VertexId* end()
        {
            return vertex_ids_ + 3;
        }
        [[nodiscard]]
        const VertexId* begin() const
        {
            return vertex_ids_;
        }
        [[nodiscard]]
        const VertexId* end() const
        {
            return vertex_ids_ + 3;
        }
    private:
        VertexId vertex_ids_[3];
        FaceId id_;
    };
    // Returns the half-edge of face with the given index (declared here,
    // defined in the corresponding source file).
    [[nodiscard]]
    HalfEdge get_half_edge(const Face& face, uint32_t edge_index);
    // Returns all three half-edges of face.
    [[nodiscard]]
    std::array<HalfEdge, 3> make_half_edges(const Face& face);
}
<file_sep>/tests/XyzTest/YtestUtilities.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 02.01.2016
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Xyz/Xyz.hpp"
#include "Ytest/Ytest.hpp"
namespace Ytest
{
    // Overloads of Ytest's equivalent() customization point so that the
    // Y_EQUIVALENT macro can compare Xyz vectors with a tolerance.
    // Presumably found by Ytest via argument-dependent lookup -- confirm
    // against Ytest's documentation.
    template <typename T, unsigned N>
    bool equivalent(const Xyz::Vector<T, N>& u, const Xyz::Vector<T, N>& v,
                    T margin)
    {
        return Xyz::are_equivalent(u, v, margin);
    }

    // Non-const overload: ensures mutable lvalue arguments also select
    // this customization.
    template <typename T, unsigned N>
    bool equivalent(Xyz::Vector<T, N>& u, Xyz::Vector<T, N>& v, T margin)
    {
        return Xyz::are_equivalent(u, v, margin);
    }
}
<file_sep>/include/Xyz/InvertMatrix.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-08.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <array>
#include "Matrix.hpp"
namespace Xyz
{
    namespace Details
    {
        // Returns the adjugate (transposed matrix of cofactors) of m,
        // written out in full for the 3x3 case.
        template <typename T>
        Matrix<T, 3, 3> get_transposed_cofactors(const Matrix<T, 3, 3>& m)
        {
            return Matrix<T, 3, 3>{
                m[{1, 1}] * m[{2, 2}] - m[{1, 2}] * m[{2, 1}],
                -(m[{0, 1}] * m[{2, 2}] - m[{0, 2}] * m[{2, 1}]),
                m[{0, 1}] * m[{1, 2}] - m[{0, 2}] * m[{1, 1}],
                -(m[{1, 0}] * m[{2, 2}] - m[{1, 2}] * m[{2, 0}]),
                m[{0, 0}] * m[{2, 2}] - m[{0, 2}] * m[{2, 0}],
                -(m[{0, 0}] * m[{1, 2}] - m[{0, 2}] * m[{1, 0}]),
                m[{1, 0}] * m[{2, 1}] - m[{1, 1}] * m[{2, 0}],
                -(m[{0, 0}] * m[{2, 1}] - m[{0, 1}] * m[{2, 0}]),
                m[{0, 0}] * m[{1, 1}] - m[{0, 1}] * m[{1, 0}]
            };
        }

        // Computes one 3x3 cofactor of a 4x4 matrix from one of its rows
        // and the cached 2x2 determinants in a. The index expression
        // i * 2 + j - 1 maps the column pair (i, j), i < j, to a's slots.
        template <typename T>
        T get_cofactor(const Matrix<T, 4, 4>& m, const std::array<T, 7>& a,
                       unsigned row, const std::array<unsigned, 3>& columns)
        {
            return m[{row, columns[0]}] * a[columns[1] * 2 + columns[2] - 1]
                   - m[{row, columns[1]}] * a[columns[0] * 2 + columns[2] - 1]
                   + m[{row, columns[2]}] * a[columns[0] * 2 + columns[1] - 1];
        }

        // Returns the adjugate (transposed matrix of cofactors) of the
        // 4x4 matrix m.
        template <typename T>
        Matrix<T, 4, 4> get_transposed_cofactors(const Matrix<T, 4, 4>& m)
        {
            Matrix<T, 4, 4> c;
            std::array<T, 7> a = {};
            // Process rows (0, 1) and rows (2, 3) in turn.
            for (unsigned k = 0; k < 4; k += 2)
            {
                // Calculate the six 2x2 determinants that are used to
                // calculate the 3x3 determinants. A naive recursive
                // solution would calculate these determinants 24 times.
                for (unsigned i = 0; i < 3; ++i)
                {
                    for (unsigned j = i + 1; j < 4; ++j)
                    {
                        auto p0 = m[{k, i}] * m[{k + 1, j}];
                        auto p1 = m[{k, j}] * m[{k + 1, i}];
                        a[i * 2 + j - 1] = p0 - p1;
                    }
                }
                int sign = 1;
                // Fill two columns of c from the cofactors of the *other*
                // pair of rows, rotating the excluded column through
                // the columns array.
                for (unsigned i = 2 - k; i < 4 - k; ++i)
                {
                    std::array<unsigned, 3> columns = {1, 2, 3};
                    auto other_row = 5 - 2 * k - i;
                    for (unsigned j = 0; j < 4; ++j)
                    {
                        c[{j, i}] = sign * get_cofactor(m, a, other_row, columns);
                        sign = -sign;
                        if (j < 3)
                            columns[j] = j;
                    }
                    sign = -sign;
                }
            }
            return c;
        }
    }

    /**
     * @brief Returns the inverse of the 3x3 or 4x4 matrix m, computed
     *  as adjugate(m) / det(m).
     * @throws XyzException if m is singular (det == 0).
     */
    template <typename T, unsigned N,
              std::enable_if_t<N == 3 || N == 4, int> = 0>
    Matrix<T, N, N> invert(const Matrix<T, N, N>& m)
    {
        auto c = Details::get_transposed_cofactors(m);
        T det = 0;
        // det(m) = first row of m times first column of the adjugate.
        for (unsigned i = 0; i < N; ++i)
            det += m[{0, i}] * c[{i, 0}];
        if (det == 0)
            XYZ_THROW("The matrix is not invertible.");
        c *= T(1) / det;
        return c;
    }

    // NOTE(review): the declared return type is Matrix<T, 1, 1>, but a
    // Matrix<FloatType<T>::type, 1, 1> is constructed; for non-floating
    // T this requires a converting constructor -- confirm that one
    // exists (or that this overload is only used with floating T).
    template <typename T>
    Matrix<T, 1, 1> invert(const Matrix<T, 1, 1>& m)
    {
        if (m[{0, 0}] == 0)
            XYZ_THROW("The matrix is not invertible.");
        return Matrix<typename FloatType<T>::type, 1, 1>{
            1.0 / m[{0, 0}]};
    }

    // NOTE(review): same declared-vs-constructed return type mismatch
    // as the 1x1 overload above (Matrix<Float, 2, 2> is returned as
    // Matrix<T, 2, 2>) -- confirm for non-floating T.
    template <typename T>
    Matrix<T, 2, 2> invert(const Matrix<T, 2, 2>& m)
    {
        using Float = typename FloatType<T>::type;
        auto det = m[{0, 0}] * m[{1, 1}] - m[{0, 1}] * m[{1, 0}];
        if (det == 0)
            XYZ_THROW("The matrix is not invertible.");
        auto w = Float(1) / det;
        return Matrix<Float, 2, 2>{m[{1, 1}] * w, -m[{0, 1}] * w,
                                   -m[{1, 0}] * w, m[{0, 0}] * w};
    }
}
<file_sep>/tests/XyzTest/test_Vector.cpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 18.12.2015
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Vector.hpp"
#include <cmath>
#include <Ytest/Ytest.hpp>
#include "Xyz/Utilities.hpp"
#include "YtestUtilities.hpp"
namespace
{
    using namespace Xyz;

    // Arithmetic operators, compound assignment, dot product and length
    // for 2D vectors.
    void test_basics_2d()
    {
        auto u = make_vector2(2.0, 1.0);
        auto v = make_vector2(3.0, 4.0);
        Y_EQUAL(u[0], 2);
        Y_EQUAL(u[1], 1);
        Y_EQUAL(v[0], 3);
        Y_EQUAL(v[1], 4);
        Y_EQUAL(u * v, make_vector2<double>(6, 4));
        Y_EQUAL(u + v, make_vector2<double>(5, 5));
        Y_EQUAL(u - v, make_vector2<double>(-1, -3));
        Y_EQUAL(u * 3., make_vector2<double>(6, 3));
        Y_EQUAL(3. * u, make_vector2<double>(6, 3));
        Y_EQUAL(u / 3., make_vector2<double>(2.0 / 3, 1.0 / 3));
        Y_EQUAL(6. / u, make_vector2<double>(3, 6));
        Y_EQUAL(u += v, make_vector2<double>(5, 5));
        Y_EQUAL(u -= v, make_vector2<double>(2, 1));
        Y_EQUAL(u *= 3., make_vector2<double>(6, 3));
        Y_EQUAL(u /= 2., make_vector2<double>(3.0, 1.5));
        Y_EQUAL(u *= v, make_vector2<double>(9, 6));
        Y_EQUAL(u *= 2., make_vector2<double>(18, 12));
        Y_EQUAL(u /= v, make_vector2<double>(6, 3));
        Y_EQUAL(dot(u, v), 30);
        Y_EQUIVALENT(get_length(v), 5, 1e-10);
    }

    // The same operator coverage for 4D vectors.
    void test_basics_4d()
    {
        auto u = make_vector4(2.0, 1.0, 4.0, 3.0);
        auto v = make_vector4(3.0, 4.0, -1.0, -2.0);
        Y_EQUAL(u[0], 2);
        Y_EQUAL(u[1], 1);
        Y_EQUAL(u[2], 4);
        Y_EQUAL(u[3], 3);
        Y_EQUAL(v[0], 3);
        Y_EQUAL(v[1], 4);
        Y_EQUAL(v[2], -1);
        Y_EQUAL(v[3], -2);
        Y_EQUAL(u * v, make_vector4<double>(6, 4, -4, -6));
        Y_EQUAL(u + v, make_vector4<double>(5, 5, 3, 1));
        Y_EQUAL(u - v, make_vector4<double>(-1, -3, 5, 5));
        Y_EQUAL(u * 3., make_vector4<double>(6, 3, 12, 9));
        Y_EQUAL(3. * u, make_vector4<double>(6, 3, 12, 9));
        Y_EQUAL(u / 3., make_vector4<double>(2.0 / 3, 1.0 / 3, 4.0 / 3, 1.0));
        Y_EQUAL(12. / u, make_vector4<double>(6, 12, 3, 4));
        Y_EQUAL(u += v, make_vector4<double>(5, 5, 3, 1));
        Y_EQUAL(u -= v, make_vector4<double>(2, 1, 4, 3));
        Y_EQUAL(u *= 3., make_vector4<double>(6, 3, 12, 9));
        Y_EQUAL(u /= 2., make_vector4<double>(3.0, 1.5, 6.0, 4.5));
        Y_EQUAL(u *= v, make_vector4<double>(9, 6, -6, -9));
        Y_EQUAL(u *= 2., make_vector4<double>(18, 12, -12, -18));
        Y_EQUAL(u /= v, make_vector4<double>(6, 3, 12, 9));
        Y_EQUAL(dot(u, v), 18 + 12 - 12 - 18);
        Y_EQUIVALENT(get_length(v), sqrt(9 + 16 + 1 + 4), 1e-10);
    }

    // vector_cast converts element types while preserving values.
    void test_constructors()
    {
        auto u = make_vector4(1, 2, 0, 1);
        Vector<double, 4> v = vector_cast<double>(u);
        Y_EQUAL(v[0], 1);
        Y_EQUAL(v[1], 2);
        Y_EQUAL(v[2], 0);
        Y_EQUAL(v[3], 1);
    }

    // 3D cross product.
    void test_cross()
    {
        auto u = make_vector3(1, 2, 3);
        auto v = make_vector3(0, 1, 2);
        Y_EQUAL(cross(u, v), make_vector3(1, -2, 1));
    }

    // Counter-clockwise angle between two 2D vectors (here 270 degrees).
    void test_get_ccw_angle()
    {
        Y_EQUIVALENT(get_ccw_angle(make_vector2(6, 6), make_vector2(4, -4)),
                     3 * Constants<double>::PI / 2, 1e-10);
    }

    // 2D rotation by positive (counter-clockwise) and negative angles.
    void test_rotated()
    {
        auto sqrt2 = std::sqrt(2);
        Y_EQUIVALENT(rotate(make_vector2<double>(100, 0), to_radians(30)),
                     make_vector2(50 * std::sqrt(3), 50.0),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2<double>(100, 0), to_radians(45)),
                     make_vector2(100 / sqrt2, 100 / sqrt2),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2<double>(100, 0), to_radians(60)),
                     make_vector2(50.0, 50 * std::sqrt(3)),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2<double>(0, 100), to_radians(-60)),
                     make_vector2(50 * std::sqrt(3), 50.0),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2<double>(0, 100), to_radians(-45)),
                     make_vector2(100 / sqrt2, 100 / sqrt2),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2<double>(0, 100), to_radians(-30)),
                     make_vector2(50.0, 50 * std::sqrt(3)),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2(1 / sqrt2, 1 / sqrt2), to_radians(45)),
                     make_vector2<double>(0, 1),
                     1e-10);
        Y_EQUIVALENT(rotate(make_vector2(1 / sqrt2, 1 / sqrt2), to_radians(135)),
                     make_vector2<double>(-1, 0),
                     1e-10);
    }

    // make_vector* deduces the element type; mixed arithmetic promotes
    // via vector_cast.
    void test_types()
    {
        auto u = make_vector2(1, 2);
        Y_EQUAL(typeid(typename decltype(u)::ValueType).name(),
                typeid(int).name());
        auto v = make_vector2(2.0, 4.0);
        Y_EQUAL(typeid(typename decltype(v)::ValueType).name(),
                typeid(double).name());
        auto w = vector_cast<double>(u) + v;
        Y_EQUAL(typeid(typename decltype(w)::ValueType).name(),
                typeid(double).name());
    }

    Y_SUBTEST("Fundamentals",
              test_basics_2d,
              test_basics_4d,
              test_constructors,
              test_cross,
              test_get_ccw_angle,
              test_rotated,
              test_types);
}
<file_sep>/include/Xyz/MatrixTransformations.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Matrix.hpp"
namespace Xyz
{
    /** @brief Returns a 3x3 homogeneous matrix scaling 2D points by
     *  (sx, sy).
     */
    template <typename T>
    Matrix<T, 3, 3> scale3(T sx, T sy)
    {
        return Matrix<T, 3, 3>{
            sx, 0, 0,
            0, sy, 0,
            0, 0, 1};
    }

    template <typename T>
    Matrix<T, 3, 3> scale3(const Vector<T, 2>& scales)
    {
        return scale3(scales[0], scales[1]);
    }

    /** @brief Returns a 4x4 homogeneous matrix scaling 3D points by
     *  (sx, sy, sz).
     */
    template <typename T>
    Matrix<T, 4, 4> scale4(T sx, T sy, T sz)
    {
        return Matrix<T, 4, 4>{
            sx, 0, 0, 0,
            0, sy, 0, 0,
            0, 0, sz, 0,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> scale4(const Vector<T, 3>& scales)
    {
        return scale4(scales[0], scales[1], scales[2]);
    }

    /** @brief Returns a 3x3 homogeneous matrix rotating 2D points by
     *  @a angle radians (counter-clockwise).
     */
    template <typename T>
    Matrix<T, 3, 3> rotate3(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 3, 3>{
            c, -s, 0,
            s, c, 0,
            0, 0, 1};
    }

    /**
     * @brief Rotation around the z axis.
     */
    template <typename T>
    Matrix<T, 4, 4> rotate_z(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            c, -s, 0, 0,
            s, c, 0, 0,
            0, 0, 1, 0,
            0, 0, 0, 1};
    }

    /**
     * @brief Rotation around the y axis.
     */
    template <typename T>
    Matrix<T, 4, 4> rotate_y(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            c, 0, s, 0,
            0, 1, 0, 0,
            -s, 0, c, 0,
            0, 0, 0, 1};
    }

    /**
     * @brief Rotation around the x axis.
     */
    template <typename T>
    Matrix<T, 4, 4> rotate_x(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            1, 0, 0, 0,
            0, c, -s, 0,
            0, s, c, 0,
            0, 0, 0, 1};
    }

    /** @brief Returns a 3x3 homogeneous matrix translating 2D points by
     *  (x, y).
     */
    template <typename T>
    Matrix<T, 3, 3> translate3(T x, T y)
    {
        return Matrix<T, 3, 3>{
            1, 0, x,
            0, 1, y,
            0, 0, 1};
    }

    template <typename T>
    Matrix<T, 3, 3> translate3(const Vector<T, 2>& offsets)
    {
        return translate3(offsets[0], offsets[1]);
    }

    /**
     * @brief Rotation by @a angle radians around an arbitrary @a axis
     *  (the Rodrigues formula; @a axis should presumably be a unit
     *  vector).
     */
    template <typename T>
    Matrix<T, 4, 4> rotate4(T angle, const Vector<T, 3>& axis)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        auto cx = axis * ((1 - c) * axis[0]);
        auto cy = axis * ((1 - c) * axis[1]);
        auto cz = axis * ((1 - c) * axis[2]);
        auto sa = s * axis;
        return Matrix<T, 4, 4>{
            cx[0] + c, cx[1] - sa[2], cx[2] + sa[1], 0,
            cy[0] + sa[2], cy[1] + c, cy[2] - sa[0], 0,
            cz[0] - sa[1], cz[1] + sa[0], cz[2] + c, 0,
            0, 0, 0, 1};
    }

    /** @brief Returns a 4x4 homogeneous matrix translating 3D points by
     *  (x, y, z).
     */
    template <typename T>
    Matrix<T, 4, 4> translate4(T x, T y, T z)
    {
        return Matrix<T, 4, 4>{
            1, 0, 0, x,
            0, 1, 0, y,
            0, 0, 1, z,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> translate4(const Vector<T, 3>& offsets)
    {
        return translate4(offsets[0], offsets[1], offsets[2]);
    }

    // The transposed_* functions below return the transposes of the
    // corresponding matrices above.

    template <typename T>
    Matrix<T, 3, 3> transposed_rotate3(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 3, 3>{
            c, s, 0,
            -s, c, 0,
            0, 0, 1};
    }

    template <typename T>
    Matrix<T, 3, 3> transposed_translate3(T x, T y)
    {
        return Matrix<T, 3, 3>{
            1, 0, 0,
            0, 1, 0,
            x, y, 1};
    }

    template <typename T>
    Matrix<T, 3, 3> transposed_translate3(const Vector<T, 2>& offsets)
    {
        // Bug fix: this previously called the non-existent function
        // transpose_translate, which failed to compile on instantiation.
        return transposed_translate3(offsets[0], offsets[1]);
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_rotate_z(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            c, s, 0, 0,
            -s, c, 0, 0,
            0, 0, 1, 0,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_rotate_y(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            c, 0, -s, 0,
            0, 1, 0, 0,
            s, 0, c, 0,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_rotate_x(T angle)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        return Matrix<T, 4, 4>{
            1, 0, 0, 0,
            0, c, s, 0,
            0, -s, c, 0,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_rotate4(T angle, const Vector<T, 3>& axis)
    {
        auto c = std::cos(angle);
        auto s = std::sin(angle);
        auto cx = axis * ((1 - c) * axis[0]);
        auto cy = axis * ((1 - c) * axis[1]);
        auto cz = axis * ((1 - c) * axis[2]);
        auto sa = s * axis;
        return Matrix<T, 4, 4>{
            cx[0] + c, cy[0] + sa[2], cz[0] - sa[1], 0,
            cx[1] - sa[2], cy[1] + c, cz[1] + sa[0], 0,
            cx[2] + sa[1], cy[2] - sa[0], cz[2] + c, 0,
            0, 0, 0, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_translate4(T x, T y, T z)
    {
        return Matrix<T, 4, 4>{
            1, 0, 0, 0,
            0, 1, 0, 0,
            0, 0, 1, 0,
            x, y, z, 1};
    }

    template <typename T>
    Matrix<T, 4, 4> transposed_translate4(const Vector<T, 3>& offsets)
    {
        return transposed_translate4(offsets[0], offsets[1], offsets[2]);
    }
}
<file_sep>/include/Xyz/Matrix.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Vector.hpp"
namespace Xyz
{
struct RowCol
{
RowCol() = default;
RowCol(unsigned r, unsigned c) : row(r), col(c) {}
unsigned row = 0;
unsigned col = 0;
};
    /**
     * @brief An M x N matrix with elements stored in row-major order.
     *
     * Elements are addressed with operator[](RowCol).
     */
    template <typename T, unsigned M, unsigned N>
    class Matrix
    {
    public:
        static constexpr size_t SIZE = M * N;
        static constexpr size_t ROWS = M;
        static constexpr size_t COLS = N;
        // Value-initializes all elements (zero for arithmetic types).
        constexpr Matrix()
            : values()
        {}
        // Throws XyzException if the number of values isn't M * N.
        Matrix(std::initializer_list<T> v)
        {
            if (v.size() != SIZE)
                XYZ_THROW("Incorrect number of arguments.");
            std::copy(v.begin(), v.end(), std::begin(values));
        }
        explicit Matrix(T (&other)[SIZE])
        {
            std::copy(std::begin(other), std::end(other), std::begin(values));
        }
        // NOTE(review): the copy operations call free begin()/end()
        // overloads for Matrix that are presumably declared later in this
        // header (resolved at instantiation); the defaulted copy
        // operations would behave the same -- confirm.
        Matrix(const Matrix& other)
        {
            std::copy(begin(other), end(other), std::begin(values));
        }
        Matrix& operator=(const Matrix& other)
        {
            if (&other == this)
                return *this;
            std::copy(begin(other), end(other), std::begin(values));
            return *this;
        }
        // Unchecked element access; pos.row < M and pos.col < N required.
        constexpr T& operator[](RowCol pos)
        {
            return values[pos.row * COLS + pos.col];
        }
        constexpr T operator[](RowCol pos) const
        {
            return values[pos.row * COLS + pos.col];
        }
        // Row-major element storage.
        T values[SIZE];
    };
template <typename T>
class Matrix<T, 2, 2>
{
public:
static constexpr size_t SIZE = 2 * 2;
static constexpr size_t ROWS = 2;
static constexpr size_t COLS = 2;
constexpr Matrix()
: values()
{}
Matrix(T a11, T a12, T a21, T a22)
: values{a11, a12, a21, a22}
{}
explicit Matrix(T (&other)[SIZE])
{
std::copy(std::begin(other), std::end(other), std::begin(values));
}
Matrix(const Matrix& other)
{
std::copy(begin(other), end(other), std::begin(values));
}
Matrix& operator=(const Matrix& other)
{
if (&other == this)
return *this;
std::copy(begin(other), end(other), std::begin(values));
return *this;
}
constexpr T& operator[](RowCol pos)
{
return values[pos.row * COLS + pos.col];
}
constexpr T operator[](RowCol pos) const
{
return values[pos.row * COLS + pos.col];
}
T values[SIZE];
};
template <typename T>
class Matrix<T, 3, 3>
{
public:
static constexpr size_t SIZE = 3 * 3;
static constexpr size_t ROWS = 3;
static constexpr size_t COLS = 3;
constexpr Matrix()
: values()
{}
Matrix(T a11, T a12, T a13,
T a21, T a22, T a23,
T a31, T a32, T a33)
: values{a11, a12, a13,
a21, a22, a23,
a31, a32, a33}
{}
explicit Matrix(T (&other)[SIZE])
{
std::copy(std::begin(other), std::end(other), std::begin(values));
}
Matrix(const Matrix& other)
{
std::copy(begin(other), end(other), std::begin(values));
}
Matrix& operator=(const Matrix& other)
{
if (&other == this)
return *this;
std::copy(begin(other), end(other), std::begin(values));
return *this;
}
constexpr T& operator[](RowCol pos)
{
return values[pos.row * COLS + pos.col];
}
constexpr T operator[](RowCol pos) const
{
return values[pos.row * COLS + pos.col];
}
T values[SIZE];
};
template <typename T>
class Matrix<T, 4, 4>
{
public:
static constexpr size_t SIZE = 4 * 4;
static constexpr size_t ROWS = 4;
static constexpr size_t COLS = 4;
constexpr Matrix()
: values()
{}
Matrix(T a11, T a12, T a13, T a14,
T a21, T a22, T a23, T a24,
T a31, T a32, T a33, T a34,
T a41, T a42, T a43, T a44)
: values{a11, a12, a13, a14,
a21, a22, a23, a24,
a31, a32, a33, a34,
a41, a42, a43, a44}
{}
explicit Matrix(T (&other)[SIZE])
{
std::copy(std::begin(other), std::end(other), std::begin(values));
}
Matrix(const Matrix& other)
{
std::copy(begin(other), end(other), std::begin(values));
}
Matrix& operator=(const Matrix& other)
{
if (&other == this)
return *this;
std::copy(begin(other), end(other), std::begin(values));
return *this;
}
constexpr T& operator[](RowCol pos)
{
return values[pos.row * COLS + pos.col];
}
constexpr T operator[](RowCol pos) const
{
return values[pos.row * COLS + pos.col];
}
T values[SIZE];
};
    /// Iterator access to the matrix's flat row-major storage.
    template <typename T, unsigned M, unsigned N>
    constexpr T* begin(Matrix<T, M, N>& m)
    {
        return std::begin(m.values);
    }

    template <typename T, unsigned M, unsigned N>
    constexpr T* end(Matrix<T, M, N>& m)
    {
        return std::end(m.values);
    }

    template <typename T, unsigned M, unsigned N>
    constexpr const T* begin(const Matrix<T, M, N>& m)
    {
        return std::begin(m.values);
    }

    template <typename T, unsigned M, unsigned N>
    constexpr const T* end(const Matrix<T, M, N>& m)
    {
        return std::end(m.values);
    }
    /**
     * @brief Returns row @a r of @a m as a vector.
     *
     * NOTE(review): this relies on a Vector(const T*, unsigned) constructor
     * that is not visible in Vector's primary template or its 2/3/4
     * specializations — confirm such a constructor exists elsewhere,
     * otherwise this function fails to compile when instantiated.
     */
    template <typename T, unsigned M, unsigned N>
    Vector<T, N> row(const Matrix<T, M, N>& m, unsigned r)
    {
        return Vector<T, N>(&m.values[r * N], N);
    }
template <typename T, unsigned M, unsigned N>
void set_row(Matrix<T, M, N>& m, unsigned row, const Vector<T, N>& v)
{
std::copy(begin(v), end(v), m.values + row * N);
}
template <typename T, unsigned M, unsigned N>
void set_row(Matrix<T, M, N>& m, unsigned row,
const T* values, unsigned count)
{
if (count != N)
XYZ_THROW("Incorrect number of columns.");
std::copy(values, values + count, m.values + row * N);
}
template <typename T, unsigned M, unsigned N>
Vector<T, M> col(const Matrix<T, M, N>& m, unsigned c)
{
Vector<T, M> result;
auto ptr = m.values + c;
for (unsigned i = 0; i < M; ++i)
{
result[i] = *ptr;
ptr += N;
}
return result;
}
template <typename T, unsigned M, unsigned N>
void set_col(Matrix<T, M, N>& m, unsigned c, const Vector<T, M>& v)
{
auto ptr = m.values + c;
for (unsigned i = 0; i < M; ++i)
{
*ptr = v[i];
ptr += N;
}
}
template <typename T, unsigned M, unsigned N>
void set_col(Matrix<T, M, N>& m, unsigned c,
const T* values, unsigned count)
{
if (count != M)
XYZ_THROW("Incorrect number of columns.");
auto ptr = m.values + c;
for (unsigned i = 0; i < M; ++i)
{
*ptr = values[i];
ptr += N;
}
}
template <typename T, unsigned N>
Matrix<T, N, N> make_identity_matrix()
{
static Matrix<T, N, N> matrix;
if (matrix[{0, 0}] == 0)
{
for (unsigned i = 0; i < N; ++i)
matrix[{i, i}] = 1;
}
return matrix;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> make_matrix_with_rows(const Vector<T, N>* rows,
unsigned count)
{
auto n = std::min(M, count);
Matrix<T, M, N> result;
for (unsigned i = 0; i < n; ++i)
result.set_row(i, rows[i]);
return result;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> make_matrix_with_cols(const Vector<T, N>* cols,
unsigned count)
{
auto n = std::min(M, count);
Matrix<T, M, N> result;
for (unsigned i = 0; i < n; ++i)
result.set_col(i, cols[i]);
return result;
}
    /**
     * @brief Returns the K x L submatrix of @a m whose top-left corner is
     *  at (i0, j0).
     *
     * Indices wrap around (modulo M and N), so the submatrix may "cross"
     * the right or bottom edge of the source matrix.
     */
    template <unsigned K, unsigned L, typename T, unsigned M, unsigned N>
    Matrix<T, K, L> make_submatrix(const Matrix<T, M, N>& m,
                                   unsigned i0, unsigned j0)
    {
        static_assert(K <= M && L <= N,
                      "The submatrix cannot be larger than the source matrix.");
        Matrix<T, K, L> result;
        for (unsigned i = 0; i < K; ++i)
        {
            // Wrap row index around the source matrix.
            auto i_m = (i + i0) % M;
            for (unsigned j = 0; j < L; ++j)
            {
                // Wrap column index around the source matrix.
                auto j_m = (j + j0) % N;
                result[{i, j}] = m[{i_m, j_m}];
            }
        }
        return result;
    }
template <typename T, unsigned N, unsigned M>
bool operator==(const Matrix<T, M, N>& a, const Matrix<T, M, N>& b)
{
for (auto i = 0u; i < M * N; ++i)
{
if (a.values[i] != b.values[i])
return false;
}
return true;
}
template <typename T, unsigned M, unsigned N>
bool operator!=(const Matrix<T, M, N>& a, const Matrix<T, M, N>& b)
{
return !(a == b);
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N>& operator+=(Matrix<T, M, N>& a, const Matrix<T, M, N>& b)
{
auto a_data = a.values;
auto b_data = b.values;
for (auto i = 0u; i < M * N; ++i)
a_data[i] += b_data[i];
return a;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> operator+(const Matrix<T, M, N>& a,
const Matrix<T, M, N>& b)
{
Matrix<T, M, N> c(a);
return c += b;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N>& operator-=(Matrix<T, M, N>& a, const Matrix<T, M, N>& b)
{
auto a_data = a.values;
auto b_data = b.values;
for (auto i = 0u; i < M * N; ++i)
a_data[i] -= b_data[i];
return a;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> operator-(const Matrix<T, M, N>& a,
const Matrix<T, M, N>& b)
{
Matrix<T, M, N> c(a);
return c -= b;
}
template <typename T, unsigned M, unsigned N, unsigned O>
Matrix<T, M, N> operator*(const Matrix<T, M, N>& a,
const Matrix<T, N, O>& b)
{
Matrix<T, M, O> result;
for (auto i = 0u; i < M; ++i)
{
for (auto j = 0u; j < O; ++j)
{
T v = 0;
for (auto k = 0u; k < N; ++k)
v += a[{i, k}] * b[{k, j}];
result[{i, j}] = v;
}
}
return result;
}
template <typename T, unsigned M, unsigned N, unsigned O>
Matrix<T, M, O>& operator*=(Matrix<T, M, N>& a, const Matrix<T, N, O>& b)
{
return a = a * b;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> operator*(Matrix<T, M, N> a, T s)
{
for (unsigned i = 0; i < M; ++i)
{
for (unsigned j = 0; j < N; ++j)
a[{i, j}] *= s;
}
return a;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N>& operator*=(Matrix<T, M, N>& a, T s)
{
for (auto& v : a)
v *= s;
return a;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N> operator*(T s, Matrix<T, M, N> a)
{
for (auto& v : a)
v *= s;
return a;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, M, N>& operator/=(Matrix<T, M, N>& a, T s)
{
for (auto& v : a)
v /= s;
return a;
}
template <typename T, typename U, unsigned M, unsigned N>
Vector<decltype(T() * U()), M>
operator*(const Matrix<T, M, N>& m, const Vector<U, N>& v)
{
using R = decltype(T() * U());
Vector<R, M> result;
for (auto i = 0u; i < M; ++i)
{
auto value = R();
for (auto j = 0u; j < N; ++j)
value += m[{i, j}] * v[j];
result[i] = value;
}
return result;
}
template <typename T, typename U, unsigned M, unsigned N>
Vector<decltype(T() * U()), M>
operator*(const Vector<T, M>& v, const Matrix<U, M, N>& m)
{
using R = decltype(T() * U());
Vector<R, N> result;
for (auto i = 0u; i < N; ++i)
{
R value = 0;
for (auto j = 0u; j < M; ++j)
value += v[j] * m[{j, i}];
result[i] = value;
}
return result;
}
template <typename T, unsigned M, unsigned N>
std::ostream& operator<<(std::ostream& os, const Matrix<T, M, N>& m)
{
os << m[{0, 0}];
for (auto j = 1u; j < N; ++j)
os << " " << m[{0, j}];
for (auto i = 1u; i < M; ++i)
{
os << " |";
for (auto j = 0u; j < N; ++j)
os << " " << m[{i, j}];
}
return os;
}
template <typename T, unsigned M, unsigned N>
Matrix<T, N, M> transpose(const Matrix<T, M, N>& m)
{
Matrix<T, N, M> result;
for (auto i = 0u; i < N; ++i)
{
for (auto j = 0u; j < M; ++j)
result[{i, j}] = m[{j, i}];
}
return result;
}
template <typename T, unsigned N>
void transpose_inplace(Matrix<T, N, N>& m)
{
for (auto i = 0u; i < N; ++i)
{
for (auto j = i + 1; j < N; ++j)
std::swap(m[{i, j}], m[{j, i}]);
}
}
template <typename T, unsigned M, unsigned N, unsigned O>
Matrix<T, M, O> multiply_transposed(const Matrix<T, M, N>& a,
const Matrix<T, O, N>& b)
{
Matrix<T, M, O> result;
for (auto i = 0u; i < M; ++i)
{
for (auto j = 0u; j < N; ++j)
{
T v = 0;
for (auto k = 0u; k < N; ++k)
v += a[{i, k}] * b[{j, k}];
result[{i, j}] = v;
}
}
return result;
}
    /// For integer matrices, equivalence is exact equality; the margin
    /// parameter is accepted (and ignored) for API symmetry with the
    /// floating-point overload.
    template <typename T, unsigned M, unsigned N,
              typename std::enable_if_t<std::is_integral_v<T>, int> = 0>
    bool are_equivalent(const Matrix<T, M, N>& a, const Matrix<T, M, N>& b,
                        T = 0)
    {
        return a == b;
    }
template <typename T, unsigned N,
typename std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
bool are_equivalent(const Matrix<T, N, N>& a, const Matrix<T, N, N>& b,
double margin = 1e-12)
{
for (unsigned i = 0; i < N; ++i)
{
for (unsigned j = 0; j < N; ++j)
{
if (std::abs(a[{i, j}] - b[{i, j}]) > margin)
return false;
}
}
return true;
}
using Matrix2I = Matrix<int, 2, 2>;
using Matrix2F = Matrix<float, 2, 2>;
using Matrix2D = Matrix<double, 2, 2>;
using Matrix3I = Matrix<int, 3, 3>;
using Matrix3F = Matrix<float, 3, 3>;
using Matrix3D = Matrix<double, 3, 3>;
using Matrix4I = Matrix<int, 4, 4>;
using Matrix4F = Matrix<float, 4, 4>;
using Matrix4D = Matrix<double, 4, 4>;
}
<file_sep>/include/Xyz/SphericalPoint.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-26.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Utilities.hpp"
#include "Vector.hpp"
namespace Xyz
{
template <typename T>
struct SphericalPoint
{
constexpr SphericalPoint() = default;
constexpr SphericalPoint(T radius, T azimuth, T polar)
: radius(radius),
azimuth(azimuth),
polar(polar)
{}
T radius = {};
T azimuth = {};
T polar = {};
};
template <typename T>
constexpr bool operator==(const SphericalPoint<T>& a,
const SphericalPoint<T>& b)
{
return a.radius == b.radius
&& a.azimuth == b.azimuth
&& a.polar == b.polar;
}
    /// Negation of member-wise equality.
    template <typename T>
    constexpr bool operator!=(const SphericalPoint<T>& a,
                              const SphericalPoint<T>& b)
    {
        return !(a == b);
    }
    /// Writes the point as "[radius, azimuth, polar]".
    template <typename T>
    std::ostream& operator<<(std::ostream& os, const SphericalPoint<T>& sp)
    {
        return os << '[' << sp.radius
                  << ", " << sp.azimuth
                  << ", " << sp.polar << ']';
    }
    /**
     * @brief Returns the spherical point corresponding to the
     *  cartesian point @a p.
     *
     * Returns a default (all-zero) point for the origin, where the angles
     * are undefined. The azimuth branches implement the quadrant handling
     * of atan2(y, x) (e.g. for y > 0, atan2(y, x) == PI/2 - atan(x/y)).
     * NOTE(review): std::atan2 would express this more directly — confirm
     * the branch form isn't kept for a specific numerical reason.
     */
    template <typename T>
    SphericalPoint<T> to_spherical(const Vector<T, 3>& p)
    {
        auto [x, y, z] = p;
        auto length = get_length(p);
        if (length == 0)
            return {};
        T theta;
        if (y > 0)
            theta = Constants<T>::PI / 2 - atan(x / y);
        else if (y < 0)
            theta = -Constants<T>::PI / 2 - atan(x / y);
        else if (x >= 0)
            theta = 0;
        else
            theta = Constants<T>::PI;
        // Polar angle measured from the xy-plane (latitude), not from
        // the z-axis.
        auto phi = asin(z / length);
        return {length, theta, phi};
    }
template <typename T>
Vector<T, 3> to_cartesian(const SphericalPoint<T>& s)
{
return {s.radius * cos(s.azimuth) * cos(s.polar),
s.radius * sin(s.azimuth) * cos(s.polar),
s.radius * sin(s.polar)};
}
    /// Converts both angles from radians to degrees; radius is unchanged.
    template <typename T>
    SphericalPoint<T> to_degrees(const SphericalPoint<T>& sp)
    {
        return {sp.radius, to_degrees(sp.azimuth), to_degrees(sp.polar)};
    }
    /// Converts both angles from degrees to radians; radius is unchanged.
    template <typename T>
    SphericalPoint<T> to_radians(const SphericalPoint<T>& sp)
    {
        return {sp.radius, to_radians(sp.azimuth), to_radians(sp.polar)};
    }
using SphericalPointF = SphericalPoint<float>;
using SphericalPointD = SphericalPoint<double>;
}
<file_sep>/include/Xyz/ComplexApprox.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-29.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <algorithm>
#include <complex>
#include <cstdlib>
#include <limits>
#include <type_traits>
namespace Xyz
{
    /**
     * @brief Wraps a complex value with a per-component comparison margin,
     *  for approximate equality tests against std::complex values.
     *
     * The real and imaginary parts of @c margin are the tolerances for the
     * corresponding components.
     */
    template <typename T,
              std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
    struct ComplexApprox
    {
        // Default tolerance: 100 ulps-ish of the value type.
        constexpr static T DEFAULT_MARGIN = 100 * std::numeric_limits<T>::epsilon();

        constexpr explicit ComplexApprox(
            std::complex<T> value,
            std::complex<T> margin = {DEFAULT_MARGIN, DEFAULT_MARGIN}
        ) noexcept
            : value(value),
              margin(margin)
        {}

        std::complex<T> value;
        std::complex<T> margin;
    };
    namespace Details
    {
        // Combines two ComplexApprox values into one that carries a's
        // value and the component-wise maximum of both margins. This
        // overload applies when T is already the common type of T and U.
        template <typename T, typename U,
                  std::enable_if_t<std::is_same_v<T, decltype(T() + U())>, int> = 0>
        ComplexApprox<T>
        merge_margins(const ComplexApprox<T>& a, const ComplexApprox<U>& b)
        {
            std::complex<T> margin = {
                std::max(a.margin.real(), b.margin.real()),
                std::max(a.margin.imag(), b.margin.imag())
            };
            return ComplexApprox<T>(a.value, margin);
        }

        // Mixed-precision overload: widens a's value and both margins to
        // the common type of T and U before merging.
        template <typename T, typename U,
                  std::enable_if_t<!std::is_same_v<T, decltype(T() + U())>, int> = 0>
        auto merge_margins(const ComplexApprox<T>& a, const ComplexApprox<U>& b)
            -> ComplexApprox<decltype(T() + U())>
        {
            using V = decltype(T() + U());
            std::complex<V> margin = {
                std::max<V>(a.margin.real(), b.margin.real()),
                std::max<V>(a.margin.imag(), b.margin.imag())
            };
            return ComplexApprox<V>(
                std::complex<V>(a.value.real(), a.value.imag()),
                margin
            );
        }
    }
    /// True if each component of @a v lies within the corresponding
    /// margin of @a m's value (closed interval check on both sides).
    template <typename T, typename U,
              std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
    constexpr bool operator==(std::complex<U> v, const ComplexApprox<T>& m)
    {
        return v.real() + m.margin.real() >= m.value.real()
               && m.value.real() + m.margin.real() >= v.real()
               && v.imag() + m.margin.imag() >= m.value.imag()
               && m.value.imag() + m.margin.imag() >= v.imag();
    }
template <typename T, typename U,
std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
constexpr bool operator==(const ComplexApprox<T>& m, std::complex<U> v)
{
return v.real() + m.margin.real() >= m.value.real()
&& m.value.real() + m.margin.real() >= v.real()
&& v.imag() + m.margin.imag() >= m.value.imag()
&& m.value.imag() + m.margin.imag() >= v.imag();
}
    /// Two approximations are equal if b's value falls within the merged
    /// (component-wise maximum) margin around a's value.
    template <typename T, typename U>
    constexpr bool operator==(const ComplexApprox<T>& a, const ComplexApprox<U>& b)
    {
        return Details::merge_margins(a, b) == b.value;
    }
    /// Negation of approximate equality.
    template <typename T, typename U,
              std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
    constexpr bool operator!=(std::complex<U> v, const ComplexApprox<T>& m)
    {
        return !(v == m);
    }
    /// Negation of approximate equality.
    template <typename T, typename U,
              std::enable_if_t<std::is_arithmetic_v<U>, int> = 0>
    constexpr bool operator!=(const ComplexApprox<T>& m, std::complex<U> v)
    {
        return !(m == v);
    }
    /// Negation of approximate equality.
    template <typename T, typename U>
    constexpr bool operator!=(const ComplexApprox<T>& a, const ComplexApprox<U>& b)
    {
        return !(a == b);
    }
}
<file_sep>/src/Xyz/Face.cpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-06-03.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Face.hpp"
#include <array>
namespace Xyz
{
HalfEdge get_half_edge(const Face& face, uint32_t edge_index)
{
switch (edge_index)
{
case 0:
return {face[0], face[1], face.id() * 3};
case 1:
return {face[1], face[2], face.id() * 3 + 1};
case 2:
return {face[2], face[0], face.id() * 3 + 2};
default:
return {};
}
}
std::array<HalfEdge, 3> make_half_edges(const Face& face)
{
return {{{face[0], face[1], face.id() * 3},
{face[1], face[2], face.id() * 3 + 1},
{face[2], face[0], face.id() * 3 + 2}}};
}
}
<file_sep>/include/Xyz/ProjectionMatrix.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 2016-01-01.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Matrix.hpp"
namespace Xyz
{
    /**
     * @brief Builds a view matrix placing the camera at @a eye, looking
     *  at @a center, with @a up as the approximate up direction.
     *
     * NOTE(review): s = cross(f, unit(up)) is only a unit vector when f
     * is perpendicular to up; the classic gluLookAt normalizes s as well.
     * Confirm callers always pass an up vector perpendicular to the view
     * direction, or whether s should be normalized here.
     */
    template <typename T>
    Matrix<T, 4, 4> make_look_at_matrix(const Vector<T, 3>& eye,
                                        const Vector<T, 3>& center,
                                        const Vector<T, 3>& up)
    {
        auto f = get_unit(center - eye);
        auto s = cross(f, get_unit(up));
        auto u = cross(s, f);
        // Rotation rows (s, u, -f) combined with translation by -eye.
        return Matrix<T, 4, 4>{
            s[0], s[1], s[2], dot(-s, eye),
            u[0], u[1], u[2], dot(-u, eye),
            -f[0], -f[1], -f[2], dot(f, eye),
            0, 0, 0, 1};
    }
    /**
     * @brief Builds a perspective projection matrix for the view frustum
     *  bounded by left/right/bottom/top at the near plane @a n and the
     *  far plane @a f (row-major layout).
     */
    template <typename T>
    Matrix<T, 4, 4> make_frustum_matrix(T l, T r, T b, T t, T n, T f)
    {
        return Matrix<T, 4, 4>{
            2 * n / (r - l), 0, (r + l) / (r - l), 0,
            0, 2 * n / (t - b), (t + b) / (t - b), 0,
            0, 0, -(f + n) / (f - n), -2 * f * n / (f - n),
            0, 0, -1, 0};
    }
    /**
     * @brief Builds an orthographic projection matrix mapping the box
     *  [l,r] x [b,t] x [n,f] to the canonical view volume (row-major
     *  layout).
     */
    template <typename T>
    Matrix<T, 4, 4> make_orthographic_matrix(T l, T r, T b, T t, T n, T f)
    {
        return Matrix<T, 4, 4> {
            2 / (r - l), 0, 0, -(r + l) / (r - l),
            0, 2 / (t - b), 0, -(t + b) / (t - b),
            0, 0, -2 / (f - n), -(f + n) / (f - n),
            0, 0, 0, 1};
    }
}
<file_sep>/tests/XyzTest/test_CoordinateSystem.cpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-11.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Ytest/Ytest.hpp>
#include <Xyz/Vector.hpp>
#include <Xyz/CoordinateSystem.hpp>
namespace
{
    // from_world_pos followed by to_world_pos must reproduce the
    // original point.
    void test_roundtrip()
    {
        Xyz::CoordinateSystem<double> sys({1, 1, 0}, {1, 1, 0},
                                          {-1, 1, 0}, {0, 0, 2});
        Xyz::Vector3D p0 = {2, 0, 0.5};
        auto p1 = sys.from_world_pos(p0);
        auto p2 = sys.to_world_pos(p1);
        Y_EQUAL(p0, p2);
    }

    // Axis-permuting coordinate system: a known local point maps to a
    // known world point.
    void test_something()
    {
        Xyz::CoordinateSystem<double> sys({0, 0, 0}, {0, 1, 0},
                                          {0, 0, 1}, {1, 0, 0});
        Xyz::Vector3D p1 = {-1, -1, 1};
        auto p2 = sys.to_world_pos(p1);
        Y_EQUAL(p2, Xyz::make_vector3(1, -1, -1));
    }

    Y_TEST(test_roundtrip, test_something);
}
<file_sep>/tests/CatchXyzTest/test_QuadraticEquation.cpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-29.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Xyz/ComplexApprox.hpp>
#include <Xyz/QuadraticEquation.hpp>
#include <catch2/catch_test_macros.hpp>
// 4x^2 + 18x - 70 = 0 factors as 2(2x - 5)(x + 7): roots -7 and 2.5.
TEST_CASE("Find real solution to quadratic equation")
{
    auto solution = Xyz::solve_real_quadratic_equation(4, 18, -70);
    REQUIRE(bool(solution));
    auto [lo, hi] = *solution;
    REQUIRE(lo == -7);
    REQUIRE(hi == 2.5);
}
// x^2 + 1 = 0 has a negative discriminant, so no real solution.
TEST_CASE("Equation without real solution")
{
    auto solution = Xyz::solve_real_quadratic_equation(1, 0, 1);
    REQUIRE_FALSE(bool(solution));
}
// x^2 + 1 = 0 over the complex numbers: roots -i and +i.
TEST_CASE("Equation with complex solution")
{
    auto [lo, hi] = Xyz::solve_complex_quadratic_equation<double>(1, 0, 1);
    using C = std::complex<double>;
    REQUIRE(lo == Xyz::ComplexApprox<double>(C(0., -1.)));
    REQUIRE(hi == Xyz::ComplexApprox<double>(C(0., 1.)));
}
<file_sep>/include/Xyz/QuadraticEquation.hpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-12-29.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <cmath>
#include <complex>
#include <optional>
#include <type_traits>
namespace Xyz
{
template <typename T, typename RESULT = decltype(sqrt(T()))>
constexpr std::optional<std::pair<RESULT, RESULT>>
solve_real_quadratic_equation(T a, T b, T c)
{
auto bb = b * b;
auto ac4 = a * c * 4;
if (bb < ac4)
return {};
auto root = sqrt(bb - ac4);
return std::pair{(-b - root) / (2 * a), (-b + root) / (2 * a)};
}
template <typename T,
std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
constexpr std::pair<std::complex<T>, std::complex<T>>
solve_complex_quadratic_equation(std::complex<T> a,
std::complex<T> b,
std::complex<T> c)
{
auto root = sqrt(b * b - T(4) * a * c);
return std::pair{(-b - root) / (T(2) * a), (-b + root) / (T(2) * a)};
}
}
<file_sep>/misc/removed/Xyz/Geometry/_Distance.hpp
#ifndef JEBMATH_GEOMETRY_DISTANCE_HPP
#define JEBMATH_GEOMETRY_DISTANCE_HPP
#include "Xyz/Line.hpp"
#include "LineString.hpp"
namespace JEBMath {

// Euclidean distance between two points.
template <typename T, typename U, unsigned N>
double getDistance(const Vector<T, N>& a, const Vector<U, N>& b);

// Distance from a point to an (infinite) 2D line; both argument orders.
template <typename T, typename U>
double getDistance(const Line<T, 2>& l, const Vector<U, 2>& p);

template <typename T, typename U>
double getDistance(const Vector<T, 2>& p, const Line<U, 2>& l);

// Distance from a point to a 2D line segment; both argument orders.
template <typename T, typename U>
double getDistance(const LineSegment<T, 2>& ls, const Vector<U, 2>& p);

template <typename T, typename U>
double getDistance(const Vector<T, 2>& p, const LineSegment<U, 2>& ls);

}
#include "Distance-impl.hpp"
#endif
<file_sep>/include/Xyz/Utilities.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <cmath>
#include "Constants.hpp"
namespace Xyz
{
    /// Converts an angle from degrees to radians.
    template <typename T>
    constexpr auto to_radians(T degrees)
    {
        return degrees * Constants<T>::PI / 180;
    }
    /// Converts an angle from radians to degrees.
    template <typename T>
    constexpr auto to_degrees(T radians)
    {
        return radians * 180 / Constants<T>::PI;
    }
    /**
     * @brief Returns the @a radians as an angle between 0 and 2*PI.
     *
     * fmod keeps the sign of its first argument, so a negative remainder
     * is shifted up by one full turn.
     */
    template <typename T>
    constexpr auto to_principal_angle(T radians)
    {
        auto result = fmod(radians, T(2) * Constants<T>::PI);
        return result >= 0 ? result : result + T(2) * Constants<T>::PI;
    }
}
<file_sep>/tests/CatchXyzTest/test_Approx.cpp
//****************************************************************************
// Copyright © 2021 <NAME>. All rights reserved.
// Created by <NAME> on 2021-03-24.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include <Xyz/Approx.hpp>
#include <catch2/catch_test_macros.hpp>
// Exercises every comparison operator of Approx against plain values
// just inside and just outside the margin (1 +/- 0.01).
TEST_CASE("Approx comparisons")
{
    Xyz::Approx<double> a(1, 0.01);
    REQUIRE(a == 0.991);
    REQUIRE_FALSE(a == 0.989);
    REQUIRE(a != 0.989);
    REQUIRE_FALSE(a != 0.991);
    REQUIRE(a == 1.009);
    REQUIRE_FALSE(a == 1.011);
    REQUIRE(a != 1.011);
    REQUIRE_FALSE(a != 1.009);
    REQUIRE(a > 0.989);
    REQUIRE_FALSE(a > 0.991);
    REQUIRE(1.011 > a);
    REQUIRE_FALSE(1.009 > a);
    REQUIRE(a < 1.011);
    REQUIRE_FALSE(a < 1.009);
    REQUIRE(0.989 < a);
    REQUIRE_FALSE(0.991 < a);
    REQUIRE(a >= 1.009);
    REQUIRE_FALSE(a >= 1.011);
    REQUIRE(0.991 >= a);
    REQUIRE_FALSE(0.989 >= a);
    REQUIRE(1.009 <= a);
    REQUIRE_FALSE(1.011 <= a);
    REQUIRE(a <= 0.991);
    REQUIRE_FALSE(a <= 0.989);
}
// Approx-vs-Approx equality (including mixed float/double): a~b and b~c
// hold, while a and c are too far apart even with combined margins.
TEST_CASE("Approx-Approx comparisons")
{
    Xyz::Approx<float> a(1.0, 0.01);
    Xyz::Approx<double> b(1.009, 0.003);
    Xyz::Approx<double> c(1.011, 0.003);
    REQUIRE(a == b);
    REQUIRE(b == c);
    REQUIRE_FALSE(a == c);
}
<file_sep>/src/Xyz/SimplexNoise.cpp
//****************************************************************************
// Copyright © 2022 <NAME>. All rights reserved.
// Created by <NAME> on 2022-05-07.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/SimplexNoise.hpp"
#include <algorithm>
// Copied (and slightly adapted)
// from https://gist.github.com/Flafla2/f0260a861be0ebdeef76
namespace
{
    // Hash lookup table as defined by Ken Perlin: a fixed, randomly
    // arranged array of all numbers from 0-255 inclusive. The constructor
    // copies it twice into permutation_ so lookups need no wrapping.
    uint8_t PERMUTATION[256] = {
        151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194, 233,
        7, 225, 140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10,
        23, 190, 6, 148, 247, 120, 234, 75, 0, 26, 197, 62, 94, 252,
        219, 203, 117, 35, 11, 32, 57, 177, 33, 88, 237, 149, 56, 87,
        174, 20, 125, 136, 171, 168, 68, 175, 74, 165, 71, 134, 139, 48,
        27, 166, 77, 146, 158, 231, 83, 111, 229, 122, 60, 211, 133, 230,
        220, 105, 92, 41, 55, 46, 245, 40, 244, 102, 143, 54, 65, 25,
        63, 161, 1, 216, 80, 73, 209, 76, 132, 187, 208, 89, 18, 169,
        200, 196, 135, 130, 116, 188, 159, 86, 164, 100, 109, 198, 173, 186,
        3, 64, 52, 217, 226, 250, 124, 123, 5, 202, 38, 147, 118, 126,
        255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182, 189,
        28, 42, 223, 183, 170, 213, 119, 248, 152, 2, 44, 154, 163, 70,
        221, 153, 101, 155, 167, 43, 172, 9, 129, 22, 39, 253, 19, 98,
        108, 110, 79, 113, 224, 232, 178, 185, 112, 104, 218, 246, 97, 228,
        251, 34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81, 51,
        145, 235, 249, 14, 239, 107, 49, 192, 214, 31, 181, 199, 106, 157,
        184, 84, 204, 176, 115, 121, 50, 45, 127, 4, 150, 254, 138, 236,
        205, 93, 222, 114, 67, 29, 24, 72, 243, 141, 128, 195, 78, 66,
        215, 61, 156, 180
    };
double gradient(int hash, double x, double y, double z)
{
switch (hash & 0xF)
{
case 0x0: return x + y;
case 0x1: return -x + y;
case 0x2: return x - y;
case 0x3: return -x - y;
case 0x4: return x + z;
case 0x5: return -x + z;
case 0x6: return x - z;
case 0x7: return -x - z;
case 0x8: return y + z;
case 0x9: return -y + z;
case 0xA: return y - z;
case 0xB: return -y - z;
case 0xC: return y + x;
case 0xD: return -y + z;
case 0xE: return y - x;
case 0xF: return -y - z;
default: return 0;
}
}
double fade(double t)
{
// Fade function as defined by Ken SimplexNoise. This eases coordinate
// values so that they will "ease" towards integral values.
// This ends up smoothing the final output.
return t * t * t * (t * (t * 6 - 15) + 10); // 6t^5 - 15t^4 + 10t^3
}
double lerp(double a, double b, double x)
{
return a + x * (b - a);
}
}
SimplexNoise::SimplexNoise()
{
    // Duplicate the 256-entry table into both halves of permutation_ so
    // corner-hash indices can exceed 255 without explicit wrapping.
    // NOTE(review): assumes permutation_ holds at least 512 entries —
    // confirm against the declaration in SimplexNoise.hpp.
    std::copy(std::begin(PERMUTATION), std::end(PERMUTATION),
              permutation_);
    std::copy(std::begin(PERMUTATION), std::end(PERMUTATION),
              permutation_ + 256);
}
/**
 * @brief Returns a smooth noise value in [0, 1] for the point (x, y, z).
 *
 * Bug fix: the original hashed the 8 cube corners with expressions
 * containing ++xi / ++yi / ++zi, which permanently mutated the indices
 * between lookups, so six of the eight corners were hashed with the
 * wrong coordinates (e.g. `aab` saw the already-incremented yi). The
 * corner offsets are now written as `+ 1` without mutation, matching the
 * reference implementation this file cites.
 */
double SimplexNoise::simplex(double x, double y, double z)
{
    // Calculate the "unit cube" containing the point and the point's
    // fractional location (0.0-1.0) inside it, then fade that location.
    // NOTE(review): int(x) truncates toward zero, so negative inputs are
    // not wrapped like the reference does — confirm callers pass
    // non-negative coordinates.
    int xi = int(x) & 255;
    int yi = int(y) & 255;
    int zi = int(z) & 255;
    double xf = x - int(x);
    double yf = y - int(y);
    double zf = z - int(z);
    double u = fade(xf);
    double v = fade(yf);
    double w = fade(zf);
    // Hash each of the 8 cube corners. Indices stay below 512 thanks to
    // the doubled permutation table built in the constructor.
    int aaa = permutation_[permutation_[permutation_[xi] + yi] + zi];
    int aba = permutation_[permutation_[permutation_[xi] + yi + 1] + zi];
    int aab = permutation_[permutation_[permutation_[xi] + yi] + zi + 1];
    int abb = permutation_[permutation_[permutation_[xi] + yi + 1] + zi + 1];
    int baa = permutation_[permutation_[permutation_[xi + 1] + yi] + zi];
    int bba = permutation_[permutation_[permutation_[xi + 1] + yi + 1] + zi];
    int bab = permutation_[permutation_[permutation_[xi + 1] + yi] + zi + 1];
    int bbb = permutation_[permutation_[permutation_[xi + 1] + yi + 1] + zi + 1];
    double x1, x2, y1, y2;
    // The gradient function computes the dot product between a
    // pseudorandom gradient vector and the vector from the input
    // coordinate to each of the 8 surrounding corners; the results are
    // lerped together, weighted by the faded (u, v, w) values.
    x1 = lerp(gradient(aaa, xf, yf, zf),
              gradient(baa, xf - 1, yf, zf),
              u);
    x2 = lerp(gradient(aba, xf, yf - 1, zf),
              gradient(bba, xf - 1, yf - 1, zf),
              u);
    y1 = lerp(x1, x2, v);
    x1 = lerp(gradient(aab, xf, yf, zf - 1),
              gradient(bab, xf - 1, yf, zf - 1),
              u);
    x2 = lerp(gradient(abb, xf, yf - 1, zf - 1),
              gradient(bbb, xf - 1, yf - 1, zf - 1),
              u);
    y2 = lerp(x1, x2, v);
    // Map the theoretical range [-1, 1] to [0, 1] for convenience.
    return (lerp(y1, y2, w) + 1) / 2;
}
/**
 * @brief Fractal noise: sums @a octaves layers of simplex noise, each at
 *  double the frequency and @a persistence times the amplitude of the
 *  previous one, normalized back to [0, 1].
 */
double SimplexNoise::simplex(double x, double y, double z,
                             int octaves, double persistence)
{
    double sum = 0;
    double max_value = 0;
    double amplitude = 1;
    double frequency = 1;
    for (int octave = 0; octave < octaves; ++octave)
    {
        sum += simplex(x * frequency, y * frequency, z * frequency)
               * amplitude;
        max_value += amplitude;
        amplitude *= persistence;
        frequency *= 2;
    }
    return sum / max_value;
}
<file_sep>/tests/CatchXyzTest/CMakeLists.txt
##****************************************************************************
## Copyright © 2015 <NAME>. All rights reserved.
## Created by <NAME> on 2015-12-18.
##
## This file is distributed under the Simplified BSD License.
## License text is included with the source distribution.
##****************************************************************************
cmake_minimum_required(VERSION 3.20)

# Fetch Catch2 v3 at configure time; it provides Catch2::Catch2WithMain,
# which supplies main() for the test executable.
include(FetchContent)
FetchContent_Declare(catch
    GIT_REPOSITORY "https://github.com/catchorg/Catch2.git"
    GIT_TAG "v3.4.0"
    )
FetchContent_MakeAvailable(catch)

# Catch2-based unit tests for the Xyz library.
add_executable(CatchXyzTest
    test_Approx.cpp
    test_ComplexApprox.cpp
    test_Mesh.cpp
    test_QuadraticEquation.cpp
    test_Rect.cpp
    test_Vector.cpp
    )

target_link_libraries(CatchXyzTest
    Xyz::Xyz
    Catch2::Catch2WithMain
    )

add_test(NAME CatchXyzTest COMMAND CatchXyzTest)
<file_sep>/include/Xyz/Vector.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <algorithm>
#include <cmath>
#include <initializer_list>
#include <ostream>
#include <type_traits>
#include "Clamp.hpp"
#include "Constants.hpp"
#include "FloatType.hpp"
#include "XyzException.hpp"
namespace Xyz
{
template <typename T, unsigned N>
class Vector
{
public:
using ValueType = T;
static constexpr size_t SIZE = N;
constexpr Vector() noexcept
: values()
{}
constexpr Vector(std::initializer_list<T> v)
{
if (v.size() != SIZE)
XYZ_THROW("Incorrect number of arguments.");
std::copy(v.begin(), v.end(), std::begin(values));
}
explicit Vector(T const (& arr)[N]) noexcept
{
std::copy(std::begin(arr), std::end(arr), std::begin(values));
}
constexpr Vector(const Vector& other) noexcept
{
std::copy(std::begin(other.values), std::end(other.values),
std::begin(values));
}
Vector& operator=(const Vector& other)
{
if (this != &other)
{
std::copy(std::begin(other.values), std::end(other.values),
std::begin(values));
}
return *this;
}
constexpr T operator[](unsigned i) const
{
return values[i];
}
constexpr T& operator[](unsigned i)
{
return values[i];
}
T values[SIZE];
};
    /** @brief 2D specialization adding an (x, y) constructor. */
    template <typename T>
    class Vector<T, 2>
    {
    public:
        using ValueType = T;
        static constexpr size_t SIZE = 2;
        constexpr Vector() noexcept
            : values()
        {}
        constexpr Vector(T x, T y) noexcept
            : values{x, y}
        {}
        explicit constexpr Vector(T const (& arr)[2]) noexcept
        {
            std::copy(std::begin(arr), std::end(arr), std::begin(values));
        }
        constexpr Vector(const Vector& other) noexcept
        {
            std::copy(std::begin(other.values), std::end(other.values),
                      std::begin(values));
        }
        constexpr Vector& operator=(const Vector& other)
        {
            if (this != &other)
            {
                std::copy(std::begin(other.values), std::end(other.values),
                          std::begin(values));
            }
            return *this;
        }
        constexpr T operator[](unsigned i) const
        {
            return values[i];
        }
        constexpr T& operator[](unsigned i)
        {
            return values[i];
        }
        T values[2];
    };
    /** @brief 3D specialization adding an (x, y, z) constructor. */
    template <typename T>
    class Vector<T, 3>
    {
    public:
        using ValueType = T;
        static constexpr size_t SIZE = 3;
        constexpr Vector() noexcept
            : values()
        {}
        constexpr Vector(T x, T y, T z) noexcept
            : values{x, y, z}
        {}
        explicit Vector(T const (& arr)[3]) noexcept
        {
            std::copy(std::begin(arr), std::end(arr), std::begin(values));
        }
        constexpr Vector(const Vector& other) noexcept
        {
            std::copy(std::begin(other.values), std::end(other.values),
                      std::begin(values));
        }
        constexpr Vector& operator=(const Vector& other)
        {
            if (this != &other)
            {
                std::copy(std::begin(other.values), std::end(other.values),
                          std::begin(values));
            }
            return *this;
        }
        constexpr T operator[](unsigned i) const
        {
            return values[i];
        }
        constexpr T& operator[](unsigned i)
        {
            return values[i];
        }
        T values[3];
    };
    /** @brief 4D specialization adding an (x, y, z, w) constructor. */
    template <typename T>
    class Vector<T, 4>
    {
    public:
        using ValueType = T;
        static constexpr size_t SIZE = 4;
        constexpr Vector() noexcept
            : values()
        {}
        constexpr Vector(T x, T y, T z, T w) noexcept
            : values{x, y, z, w}
        {}
        explicit Vector(T const (& arr)[4]) noexcept
        {
            std::copy(std::begin(arr), std::end(arr), std::begin(values));
        }
        constexpr Vector(const Vector& other) noexcept
        {
            std::copy(std::begin(other.values), std::end(other.values),
                      std::begin(values));
        }
        constexpr Vector& operator=(const Vector& other)
        {
            if (this != &other)
            {
                std::copy(std::begin(other.values), std::end(other.values),
                          std::begin(values));
            }
            return *this;
        }
        constexpr T operator[](unsigned i) const
        {
            return values[i];
        }
        constexpr T& operator[](unsigned i)
        {
            return values[i];
        }
        T values[4];
    };
    // Free begin/end so Vector works with range-for and the algorithms
    // library (the class itself has no begin/end members).
    template <typename T, unsigned N>
    constexpr T* begin(Vector<T, N>& v)
    {
        return std::begin(v.values);
    }
    template <typename T, unsigned N>
    constexpr T* end(Vector<T, N>& v)
    {
        return std::end(v.values);
    }
    template <typename T, unsigned N>
    constexpr const T* begin(const Vector<T, N>& v)
    {
        return std::begin(v.values);
    }
    template <typename T, unsigned N>
    constexpr const T* end(const Vector<T, N>& v)
    {
        return std::end(v.values);
    }
    /** @brief Exact element-wise equality; mixed value types allowed. */
    template <typename T, typename S, unsigned N>
    constexpr bool operator==(const Vector<T, N>& u, const Vector<S, N>& v)
    {
        for (unsigned i = 0; i < N; ++i)
        {
            if (u[i] != v[i])
                return false;
        }
        return true;
    }
    template <typename T, typename S, unsigned N>
    constexpr bool operator!=(const Vector<T, N>& u, const Vector<S, N>& v)
    {
        return !(u == v);
    }
    // In-place arithmetic: vector-vector forms are element-wise,
    // scalar forms apply the scalar to every element.
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator+=(Vector<T, N>& u, const Vector<T, N>& v)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] += v[i];
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator+=(Vector<T, N>& u, T scalar)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] += scalar;
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator-=(Vector<T, N>& u, const Vector<T, N>& v)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] -= v[i];
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator-=(Vector<T, N>& u, T scalar)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] -= scalar;
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator*=(Vector<T, N>& u, T scalar)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] *= scalar;
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator*=(Vector<T, N>& u, const Vector<T, N>& v)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] *= v[i];
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator/=(Vector<T, N>& u, T scalar)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] /= scalar;
        return u;
    }
    template <typename T, unsigned N>
    constexpr Vector<T, N>& operator/=(Vector<T, N>& u, const Vector<T, N>& v)
    {
        for (unsigned i = 0; i < N; ++i)
            u[i] /= v[i];
        return u;
    }
    /** @brief Element-wise negation. */
    template <typename T, unsigned N>
    constexpr Vector<T, N> operator-(Vector<T, N> v)
    {
        for (unsigned i = 0; i < N; ++i)
            v[i] = -v[i];
        return v;
    }
    // Binary +/- allow mixed value types; the result's value type is the
    // usual arithmetic promotion of T and S (decltype(T() * S())).
    template <typename T, typename S, unsigned N>
    constexpr auto operator+(const Vector<T, N>& u, const Vector<S, N>& v)
    {
        Vector<decltype(T() * S()), N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] + v[i];
        return w;
    }
    template <typename T, typename S, unsigned N>
    constexpr auto operator-(const Vector<T, N>& u, const Vector<S, N>& v)
    {
        Vector<decltype(T() * S()), N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] - v[i];
        return w;
    }
    template <typename T, unsigned N>
    constexpr auto operator/(const Vector<T, N>& u, T scalar)
    {
        Vector<T, N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] / scalar;
        return w;
    }
    template <typename T, unsigned N>
    constexpr auto operator/(T scalar, const Vector<T, N>& u)
    {
        Vector<T, N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = scalar / u[i];
        return w;
    }
    template <typename T, unsigned N>
    Vector<T, N> operator/(const Vector<T, N>& u, const Vector<T, N>& v)
    {
        Vector<T, N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] / v[i];
        return w;
    }
    template <typename T, unsigned N>
    constexpr auto operator*(const Vector<T, N>& u, T scalar)
    {
        Vector<T, N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] * scalar;
        return w;
    }
    template <typename T, unsigned N>
    constexpr auto operator*(T scalar, const Vector<T, N>& v)
    {
        return v * scalar;
    }
    // NOTE: vector * vector is the ELEMENT-WISE (Hadamard) product,
    // not the dot product — use dot() for the inner product.
    template <typename T, unsigned N>
    constexpr auto operator*(const Vector<T, N>& u, const Vector<T, N>& v)
    {
        Vector<T, N> w;
        for (unsigned i = 0; i < N; ++i)
            w[i] = u[i] * v[i];
        return w;
    }
    /** @brief Writes @a v as "[a, b, ...]". */
    template <typename T, unsigned N>
    std::ostream& operator<<(std::ostream& os, const Vector<T, N>& v)
    {
        const T* it = begin(v);
        os << "[" << *it;
        while (++it != end(v))
            os << ", " << *it;
        return os << "]";
    }
    // Factory helpers; the (vector, extra) overloads append a coordinate
    // to a lower-dimensional vector, converting its elements to T.
    template <typename T>
    [[nodiscard]]
    constexpr Vector<T, 2> make_vector2(T x, T y)
    {
        return Vector<T, 2>({x, y});
    }
    template <typename T>
    [[nodiscard]]
    constexpr Vector<T, 3> make_vector3(T x, T y, T z)
    {
        return Vector<T, 3>({x, y, z});
    }
    template <typename T, typename U>
    [[nodiscard]]
    constexpr Vector<T, 3> make_vector3(const Vector<U, 2>& v, T z)
    {
        return Vector<T, 3>({T(v[0]), T(v[1]), z});
    }
    template <typename T>
    [[nodiscard]]
    constexpr Vector<T, 4> make_vector4(T x, T y, T z, T w)
    {
        return Vector<T, 4>({x, y, z, w});
    }
    template <typename T, typename U>
    [[nodiscard]]
    constexpr Vector<T, 4> make_vector4(const Vector<U, 3>& v, T w)
    {
        return Vector<T, 4>({T(v[0]), T(v[1]), T(v[2]), w});
    }
    /** @brief Converts a Vector<U, N> to Vector<T, N> element-wise.
     *
     * Returns the input unchanged when T and U are the same type.
     */
    template <typename T, typename U, unsigned N>
    [[nodiscard]]
    Vector<T, N> vector_cast(const Vector<U, N>& v)
    {
        if constexpr (std::is_same<T, U>::value)
        {
            return v;
        }
        else
        {
            Vector<T, N> result;
            for (unsigned i = 0; i < N; ++i)
                result[i] = static_cast<T>(v[i]);
            return result;
        }
    }
    // Compile-time-checked element access (also enables ADL get for
    // structured bindings together with the tuple traits below).
    template <unsigned Index, typename T, unsigned N>
    [[nodiscard]]
    T& get(Vector<T, N>& v)
    {
        static_assert(Index < N, "Incorrect vector index.");
        return v[Index];
    }
    template <unsigned Index, typename T, unsigned N>
    [[nodiscard]]
    T get(const Vector<T, N>& v)
    {
        static_assert(Index < N, "Incorrect vector index.");
        return v[Index];
    }
    /** @brief Returns the dot (inner) product of @a u and @a v. */
    template <typename T, typename S, unsigned N>
    [[nodiscard]]
    auto dot(const Vector<T, N>& u, const Vector<S, N>& v)
    {
        decltype(T() * S()) result = 0;
        for (unsigned i = 0; i < N; ++i)
            result += u[i] * v[i];
        return result;
    }
    /** @brief Squared Euclidean length; avoids the sqrt of get_length. */
    template <typename T, unsigned N>
    [[nodiscard]]
    auto get_length_squared(const Vector<T, N>& v)
    {
        return dot(v, v);
    }
    template <typename T, unsigned N>
    [[nodiscard]]
    auto get_length(const Vector<T, N>& v)
    {
        return std::sqrt(get_length_squared(v));
    }
    /** @brief Returns @a v rotated 90 degrees counter-clockwise,
     *      i.e. a normal of the 2D vector with the same length.
     */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> get_normal(const Vector<T, 2>& v)
    {
        return make_vector2(-v[1], v[0]);
    }
    // Integral overload: exact comparison; the ignored third parameter
    // keeps the signature parallel with the floating-point overload.
    template <typename T, unsigned N,
              typename std::enable_if_t<std::is_integral_v<T>, int> = 0>
    [[nodiscard]]
    bool are_equivalent(const Vector<T, N>& u, const Vector<T, N>& v, T = 0)
    {
        return u == v;
    }
    // Floating-point overload: true when u and v are "close enough".
    // NOTE(review): this compares the SQUARED distance against margin
    // (not margin squared) — confirm callers expect a squared margin.
    template <typename T, unsigned N,
              typename std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
    [[nodiscard]]
    bool are_equivalent(const Vector<T, N>& u, const Vector<T, N>& v,
                        T margin = Constants<T>::DEFAULT_MARGIN)
    {
        return get_length_squared(u - v) <= margin;
    }
    /** @brief Returns the cosine of the angle between @a u and @a v. */
    template <typename T, unsigned N>
    [[nodiscard]]
    auto get_cos_angle(const Vector<T, N>& u, const Vector<T, N>& v)
    {
        return dot(u, v) / std::sqrt(get_length_squared(u) * get_length_squared(v));
    }
    /** @brief Returns the smallest angle between @a u and @a v.
     * @return A value in the range 0 <= angle <= pi.
     */
    template <typename T, unsigned N>
    [[nodiscard]]
    auto get_angle(const Vector<T, N>& u, const Vector<T, N>& v)
    {
        return std::acos(get_cos_angle(u, v));
    }
    /** @brief Returns the counter-clockwise angle from @a u to @a v.
     * @return A value in the range 0 <= angle < 2*pi; the sign of the
     *     normal-dot decides which side of u the vector v lies on.
     */
    template <typename T>
    [[nodiscard]]
    auto get_ccw_angle(const Vector<T, 2>& u, const Vector<T, 2>& v)
    {
        auto angle = get_angle(u, v);
        if (dot(get_normal(u), v) >= 0)
            return angle;
        else
            return 2 * Constants<decltype(angle)>::PI - angle;
    }
    /** @brief Returns the 3D cross product of @a a and @a b. */
    template <typename T>
    [[nodiscard]]
    Vector<T, 3> cross(const Vector<T, 3>& a, const Vector<T, 3>& b)
    {
        return {a[1] * b[2] - a[2] * b[1],
                a[2] * b[0] - a[0] * b[2],
                a[0] * b[1] - a[1] * b[0]};
    }
    /** @brief Returns @a v scaled to length 1.
     *      Precondition: v must not be the zero vector.
     */
    template <typename T, unsigned N>
    [[nodiscard]]
    auto get_unit(const Vector<T, N>& v)
    {
        return v / get_length(v);
    }
    /** @brief Rescales @a v in place so its length becomes @a new_length. */
    template <typename T, unsigned N>
    Vector<T, N>& scale_inplace(Vector<T, N>& v, T new_length)
    {
        return v *= (new_length / get_length(v));
    }
    /** @brief Returns a copy of @a v scaled to length @a new_length. */
    template <typename T, unsigned N>
    [[nodiscard]]
    Vector<T, N> get_scaled(const Vector<T, N>& v, T new_length)
    {
        return v * (new_length / get_length(v));
    }
    /** @brief Clamps every element of @a v to [min, max] in place. */
    template <typename T, unsigned N>
    Vector<T, N>& clamp_inplace(Vector<T, N>& v, T min, T max)
    {
        clamp_range(begin(v), end(v), min, max);
        return v;
    }
    /** @brief Returns a copy of @a v with every element clamped to [min, max]. */
    template <typename T, unsigned N>
    [[nodiscard]]
    Vector<T, N> get_clamped(Vector<T, N> v, T min, T max)
    {
        clamp_inplace(v, min, max);
        return v;
    }
template <typename T>
[[nodiscard]]
Vector<T, 2> reflect(const Vector<T, 2>& v,
const Vector<T, 2>& mirror)
{
auto n = get_normal(mirror);
return v - 2 * (v * n) * n;
}
    /** @brief Returns @a v rotated @a radians counter-clockwise around
     *      the origin. The result is truncated back to T for integral
     *      vectors.
     */
    template <typename T>
    [[nodiscard]]
    Vector<T, 2> rotate(const Vector<T, 2>& v, double radians)
    {
        auto c = std::cos(radians);
        auto s = std::sin(radians);
        return make_vector2(T(v[0] * c - v[1] * s), T(v[0] * s + v[1] * c));
    }
template <typename T, unsigned N,
typename std::enable_if_t<std::is_integral_v<T>, int> = 0>
[[nodiscard]]
bool is_null(Vector<T, N>& v, T = 0)
{
return std::none_of(v.begin(), v.end(),
[](auto n){return n != 0;});
}
template <typename T, unsigned N,
typename std::enable_if_t<std::is_floating_point_v<T>, int> = 0>
[[nodiscard]]
bool is_null(Vector<T, N>& v, T margin = Constants<T>::DEFAULT_MARGIN)
{
return std::none_of(v.begin(), v.end(),
[&](auto n) {return fabs(n) > margin;});
}
    // Convenience aliases for the common instantiations
    // (I = int, F = float, D = double).
    using Vector2I = Vector<int, 2>;
    using Vector2F = Vector<float, 2>;
    using Vector2D = Vector<double, 2>;
    using Vector3I = Vector<int, 3>;
    using Vector3F = Vector<float, 3>;
    using Vector3D = Vector<double, 3>;
    using Vector4I = Vector<int, 4>;
    using Vector4F = Vector<float, 4>;
    using Vector4D = Vector<double, 4>;
}
// Tuple protocol so Vector supports structured bindings (together with
// the ADL-found Xyz::get).
// NOTE(review): tuple_size conventionally uses size_t as the constant's
// type; this uses unsigned — confirm structured bindings work on all
// supported compilers.
template <typename T, unsigned N>
struct std::tuple_size<Xyz::Vector<T, N>>
    : std::integral_constant<unsigned, N>
{};
template <size_t I, typename T, unsigned N>
struct std::tuple_element<I, Xyz::Vector<T, N>>
{
    using type = T;
};
<file_sep>/include/Xyz/CoordinateSystem.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-07-23.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "InvertMatrix.hpp"
namespace Xyz
{
    /** @brief A 3D coordinate system defined by an origin and three axes,
     *      with cached transforms to and from world coordinates.
     */
    template <typename T>
    class CoordinateSystem
    {
    public:
        CoordinateSystem() = default;
        /** @brief Constructs from an origin and three (not necessarily
         *      unit-length) axis vectors.
         *
         * Each from-world row is the axis divided by its squared length,
         * so projecting onto it yields the coordinate along that axis.
         */
        CoordinateSystem(const Vector<T, 3>& origin,
                         const Vector<T, 3>& axis1,
                         const Vector<T, 3>& axis2,
                         const Vector<T, 3>& axis3)
            : m_origin(origin)
        {
            set_row(m_from_world, 0, axis1 / get_length_squared(axis1));
            set_row(m_from_world, 1, axis2 / get_length_squared(axis2));
            set_row(m_from_world, 2, axis3 / get_length_squared(axis3));
            m_to_world = invert(m_from_world);
        }
        /** @brief Constructs from an origin and a ready-made
         *      world-to-local transform; the inverse is cached.
         */
        CoordinateSystem(const Vector<T, 3>& origin,
                         const Matrix<T, 3, 3>& from_world_transform)
            : m_origin(origin),
              m_to_world(invert(from_world_transform)),
              m_from_world(from_world_transform)
        {}
        constexpr const Vector<T, 3>& origin() const
        {
            return m_origin;
        }
        constexpr const Matrix<T, 3, 3>& from_world_transform() const
        {
            return m_from_world;
        }
        constexpr const Matrix<T, 3, 3>& to_world_transform() const
        {
            return m_to_world;
        }
        /** @brief Converts a point in this system to world coordinates. */
        template <typename U>
        constexpr auto to_world_pos(const Vector<U, 3>& p) const
        {
            return m_to_world * p + m_origin;
        }
        /** @brief Converts a world-space point into this system. */
        template <typename U>
        constexpr auto from_world_pos(const Vector<U, 3>& p) const
        {
            return m_from_world * (p - m_origin);
        }
    private:
        Vector<T, 3> m_origin;
        Matrix<T, 3, 3> m_to_world;
        Matrix<T, 3, 3> m_from_world;
    };
}
<file_sep>/tests/XyzTest/test_Triangle.cpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 06.03.2016.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/Xyz.hpp"
#include "Ytest/Ytest.hpp"
namespace
{
    // Exclusive containment: points exactly on an edge do not count.
    void test_contains_point()
    {
        auto triangle = Xyz::make_triangle(Xyz::make_vector2(-5, 0),
                                           Xyz::make_vector2(5, 0),
                                           Xyz::make_vector2(0, 5));
        Y_ASSERT(contains_point(triangle, Xyz::make_vector2(0, 2), 1e-12));
        Y_ASSERT(!contains_point(triangle, Xyz::make_vector2(-3, 2), 1e-12));
    }
    // Inclusive containment: (-3, 2) lies on an edge and is accepted.
    void test_contains_point_inclusive()
    {
        auto tri = Xyz::make_triangle(Xyz::make_vector2(-5, 0),
                                      Xyz::make_vector2(5, 0),
                                      Xyz::make_vector2(0, 5));
        Y_ASSERT(contains_point_inclusive(tri, Xyz::make_vector2(0, 2), 1e-12));
        Y_ASSERT(contains_point_inclusive(tri, Xyz::make_vector2(-3, 2), 1e-12));
    }
    // Axis-aligned triangle in the z = 2 plane: area is 0.5 * 4 * 6 = 12.
    void test_get_area()
    {
        auto tri = Xyz::make_triangle(Xyz::make_vector3(3, 1, 2),
                                      Xyz::make_vector3(7, 5, 2),
                                      Xyz::make_vector3(3, 7, 2));
        Y_EQUIVALENT(Xyz::get_area_squared(tri), 144, 1e-10);
        Y_EQUIVALENT(Xyz::get_area(tri), 12, 1e-10);
    }
    Y_SUBTEST("Geometry",
              test_contains_point,
              test_contains_point_inclusive,
              test_get_area);
}
<file_sep>/include/Xyz/FloatType.hpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-06.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
namespace Xyz
{
template <typename T>
struct FloatType
{
using type = double;
};
template <>
struct FloatType<float>
{
using type = float;
};
template <>
struct FloatType<long double>
{
using type = long double;
};
}
<file_sep>/include/Xyz/Xyz.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "ComplexApprox.hpp"
#include "Line.hpp"
#include "LineClipping.hpp"
#include "LineLineIntersection.hpp"
#include "LineSegment.hpp"
#include "Matrix.hpp"
#include "MatrixTransformations.hpp"
#include "Mesh.hpp"
#include "ProjectionMatrix.hpp"
#include "QuadraticEquation.hpp"
#include "RandomGenerator.hpp"
#include "SphericalPoint.hpp"
#include "Triangle.hpp"
#include "Utilities.hpp"
#include "Vector.hpp"
<file_sep>/include/Xyz/LineSegment.hpp
//****************************************************************************
// Copyright © 2016 <NAME>. All rights reserved.
// Created by <NAME> on 2016-02-07.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "Clamp.hpp"
#include "Vector.hpp"
namespace Xyz
{
template <typename T, unsigned N>
class LineSegment
{
public:
LineSegment() = default;
LineSegment(const Vector<T, N>& start, const Vector<T, N>& end)
: m_start(start),
m_end(end)
{}
const Vector<T, N>& end() const
{
return m_end;
}
void set_end(const Vector<T, N>& end)
{
m_end = end;
}
const Vector<T, N>& start() const
{
return m_start;
}
void set_start(const Vector<T, N>& start)
{
m_start = start;
}
private:
Vector<T, N> m_start;
Vector<T, N> m_end;
};
    /** @brief Returns the start point of @a line. */
    template <typename T, unsigned N>
    const Vector<T, N>& get_start(const LineSegment<T, N>& line)
    {
        return line.start();
    }
    /** @brief Returns the end point of @a line. */
    template <typename T, unsigned N>
    const Vector<T, N>& get_end(const LineSegment<T, N>& line)
    {
        return line.end();
    }
    /** @brief Returns the vector from the start to the end of @a line. */
    template <typename T, unsigned N>
    Vector<T, N> get_vector(const LineSegment<T, N>& line)
    {
        return get_end(line) - get_start(line);
    }
    /** @brief Writes @a line as a JSON-like object with start and end. */
    template <typename T, unsigned N>
    std::ostream& operator<<(std::ostream& os, const LineSegment<T, N>& line)
    {
        return os << "{\"start\": " << get_start(line)
                  << ", \"end\": " << get_end(line) << "}";
    }
    /** @brief Returns the point at parameter @a t on @a line,
     *      where t = 0 is the start and t = 1 is the end.
     */
    template <typename T, unsigned N>
    Vector<T, N> get_point_at_t(const LineSegment<T, N>& line,
                                typename FloatType<T>::type t)
    {
        return get_start(line) + vector_cast<T>(get_vector(line) * t);
    }
    /** @brief Returns a single coordinate of the point at parameter @a t. */
    template <typename T, unsigned N>
    T get_coordinate_at_t(const LineSegment<T, N>& line, size_t coord,
                          typename FloatType<T>::type t)
    {
        auto v0 = get_start(line)[coord];
        auto v1 = get_end(line)[coord];
        return v0 + static_cast<T>(t * (v1 - v0));
    }
    /** @brief Convenience factory deducing T and N from the points. */
    template <typename T, unsigned N>
    LineSegment<T, N> make_line_segment(const Vector<T, N>& start,
                                        const Vector<T, N>& end)
    {
        return LineSegment<T, N>(start, end);
    }
    /** @brief Returns the Euclidean length of @a line. */
    template <typename T, unsigned N>
    double get_length(const LineSegment<T, N>& line)
    {
        return get_length(get_vector(line));
    }
    /** @brief Returns @a line with its start and end points swapped. */
    template <typename T, unsigned N>
    LineSegment<T, N> get_reverse(const LineSegment<T, N>& line)
    {
        return LineSegment<T, N>(get_end(line), get_start(line));
    }
template <typename T, unsigned N>
Vector<T, N> get_nearest_point(const LineSegment<T, N>& line,
const Vector<T, N>& point)
{
auto divisor = get_length_squared(get_vector(line));
auto t = (point - get_start(line)) * get_vector(line) / divisor;
return get_point_at_t(line, clamp(t, 0.0, 1.0));
}
template <typename T>
Vector<T, 2> get_relative_position(const LineSegment<T, 2>& line,
const Vector<T, 2>& point)
{
auto lv = get_vector(line);
auto len = get_length_squared(lv);
auto pv = point - get_start(line);
return make_vector2<T>(lv * pv / len, get_normal(lv) * pv / len);
}
}
<file_sep>/misc/removed/Xyz/Utilities/GetNearest.hpp
//****************************************************************************
// Copyright © 2015 <NAME>. All rights reserved.
// Created by <NAME> on 2015-12-18.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include <cmath>
#include "Xyz/Constants.hpp"
namespace Xyz
{
template<typename T>
T getSign(T value)
{
if (value > 0)
return T(1);
else if (value < 0)
return T(-1);
else
return T(0);
}
    /** @brief Converts an angle from degrees to radians. */
    constexpr double toRadians(double degrees)
    {
        return degrees * Pi / 180;
    }
    /** @brief Converts an angle from radians to degrees. */
    constexpr double toDegrees(double radians)
    {
        return radians * 180 / Pi;
    }
inline int modulo(int dividend, int divisor)
{
if (dividend >= 0)
return dividend % divisor;
else
return divisor - (-dividend % divisor);
}
}
<file_sep>/tests/XyzTest/test_InvertMatrix.cpp
//****************************************************************************
// Copyright © 2019 <NAME>. All rights reserved.
// Created by <NAME> on 2019-08-05.
//
// This file is distributed under the BSD License.
// License text is included with the source distribution.
//****************************************************************************
#include "Xyz/InvertMatrix.hpp"
#include "Ytest/Ytest.hpp"
namespace
{
    using namespace Xyz;
    // Checks m against the identity within the default margin.
    template <typename T, unsigned N>
    bool equals_identity_matrix(const Matrix<T, N, N>& m)
    {
        return are_equivalent(m, make_identity_matrix<T, N>());
    }
    // Each test verifies that m * invert(m) == I for a fixed matrix.
    void test_invert2()
    {
        Matrix2D m{1, 4,
                   2, 1};
        Y_ASSERT(equals_identity_matrix(m * invert(m)));
    }
    void test_invert3()
    {
        Matrix3D m{1, 4, 2,
                   2, 1, 3,
                   5, 7, 2};
        Y_ASSERT(equals_identity_matrix(m * invert(m)));
    }
    void test_invert4()
    {
        Matrix4D m{1, -2, 3, 2,
                   2, 3, 1, -1,
                   3, 7, 0, 3,
                   -1, 4, 2, 1};
        Y_ASSERT(equals_identity_matrix(m * invert(m)));
    }
    Y_TEST(test_invert2,
           test_invert3,
           test_invert4);
}
|
b21b07fc237a764474eb9995783c1b714356bf5f
|
[
"CMake",
"C++",
"Shell"
] | 56 |
C++
|
jebreimo/Xyz
|
2f3a6773345c28ad2925e24e89cab296561b42c5
|
a26b7d93fbb112ebf6c13369c16313db82b68550
|
refs/heads/master
|
<file_sep>// Write a CommonJS module, based on the example from Chapter 7,
// which contains the array of roads and exports the graph data structure
// representing them as roadGraph. It should depend on a module ./graph, which
// exports a function buildGraph that is used to build the graph. This function
// expects an array of two-element arrays (the start and end points of the roads).
// const {buildGraph} = require("./graph");
// Build the undirected road graph from "A-B" road strings.
// ./graph's buildGraph splits the strings itself, so roads is passed as-is.
const buildMap = require("./graph");
const roads = [
  "Alice's House-Bob's House", "Alice's House-Cabin",
  "Alice's House-Post Office", "Bob's House-Town Hall",
  "Daria's House-Ernie's House", "Daria's House-Town Hall",
  "Ernie's House-Grete's House", "Grete's House-Farm",
  "Grete's House-Shop", "Marketplace-Farm",
  "Marketplace-Post Office", "Marketplace-Shop",
  "Marketplace-Town Hall", "Shop-Town Hall"
];
// exports.roadGraph = buildGraph(roads.map(r => r.split("-")));
const roadGraph = buildMap(roads);
console.log(roadGraph);
// {
// Alice's House: {
// Bob's House: 1
// Cabin: 1
// Post Office: 1
// }
// Bob's House: {Alice's House: 1, Town Hall: 1}
// Cabin: {Alice's House: 1}
// Post Office: {Alice's House: 1, Marketplace: 1}
// Town Hall: {
// Bob's House: 1
// Daria's House: 1
// Marketplace: 1
// Shop: 1
// }
// Daria's House: {Ernie's House: 1, Town Hall: 1}
// Ernie's House: {Daria's House: 1, Grete's House: 1}
// Grete's House: {Ernie's House: 1, Farm: 1, Shop: 1}
// Farm: {Grete's House: 1, Marketplace: 1}
// Shop: {
// Grete's House: 1
// Marketplace: 1
// Town Hall: 1
// }
// Marketplace: {
// Farm: 1
// Post Office: 1
// Shop: 1
// Town Hall: 1
// }
<file_sep>let out = "";
for(let counter = 1;counter <= 100;counter++){
if (counter%3 === 0) {
out = "Fizz";
}
if (counter%5 === 0) {
out += "Buzz";
}
if (out == "") {
out =counter;
}
console.log(out);
out = "";
}
<file_sep>//Use the reduce method in combination with the concat method to “flatten”
//an array of arrays into a single array that has all the elements of the
// original arrays.
function flatten(inArray) {
let flatArray = [];
for (let curArray of inArray) {
for (let curElement of curArray) {
flatArray.push(curElement);
}
}
return flatArray;
}
// Reduce-based variant: fold concat over the array of arrays.
function flatten2(inArray) {
  return inArray.reduce((acc, cur) => acc.concat(cur), []);
}
let arrays = [[1, 2, 3], [4, 5], [6]];
console.log(flatten(arrays));
console.log(flatten2(arrays));
<file_sep>// To export roadGraph, you add a property to the exports object. Because
// buildGraph takes a data structure that doesn’t precisely match roads, the
// splitting of the road strings must happen in your module.
function buildGraph(edges) {
let graph = Object.create(null);
function addEdge(from, to) {
if (graph[from] == null) {
graph[from] = [to];
} else {
graph[from].push(to);
}
}
for (let [from, to] of edges.map(r => r.split("-"))) {
addEdge(from, to);
addEdge(to, from);
}
return graph;
}
module.exports = buildGraph;
<file_sep>function isEven(num){
if (num < 0) {
return "Number must be at least 0. Please try again.";
}
else if (num === 0) {
return true;
}
else if (num === 1) {
return false;
}
else {
return isEven(num-2);
}
}
console.log(isEven(75));
<file_sep>function reverseArray(inArray) {
let newArray = [];
for (let cur of inArray) {
newArray.unshift(cur);
}
return newArray;
}
// Reverses arrayValue in place by swapping mirrored pairs.
// Bug fix: the old loop ran over (almost) the whole array instead of
// half of it, so most swaps were undone again — e.g. [1,2,3,4,5]
// ended up as [5,2,3,4,1]. It also leaked the undeclared globals
// i, j and halfArraySize.
function reverseArrayInPlace(arrayValue) {
  for (let i = 0, j = arrayValue.length - 1; i < j; i++, j--) {
    const temp = arrayValue[i];
    arrayValue[i] = arrayValue[j];
    arrayValue[j] = temp;
  }
}
// Demo: reverseArray returns a new array; reverseArrayInPlace mutates.
console.log(reverseArray(["A", "B", "C"]));
let arrayValue = [1, 2, 3, 4, 5];
reverseArrayInPlace(arrayValue);
console.log(arrayValue);
<file_sep>let size = 8;
let lineA="";
let lineB="";
let line="";
for (let count=0;count<size/2;count++) {
lineA += " #";
lineB += "# "
}
line = lineA +"\n" + lineB;
for (let counter=0;counter<size/2;counter++) {
console.log(line);
}
<file_sep>//Implement every as a function that takes an array and a predicate function as
// parameters. Write two versions, one using a loop and one using the some method.
function every(array, test) {
// return true if every element of array passes test
}
function everySome(array, test) {
}
console.log(every([1, 3, 5], n => n < 10));
// → true
console.log(every([2, 4, 16], n => n < 10));
// → false
console.log(every([], n => n < 10));
// → true
console.log(everySome([1, 3, 5], n => n < 10));
// → true
console.log(everySome([2, 4, 16], n => n < 10));
// → false
console.log(everySome([], n => n < 10));
// → true
<file_sep>let size = 9;
let lineA="";
let lineB="";
let line="";
let out = "";
let halfSize = Math.trunc(size/2);
for (let count=0;count<halfSize;count++) {
lineA += " #";
lineB += "# "
}
if (size % 2 == 1) {
lineB += "#";
}
line = lineA +"\n" + lineB;
for (let counter=0;counter<halfSize;counter++) {
out = out + line + "\n";
}
if (size % 2 == 1) {
out = out + lineA;
}
console.log(out);
<file_sep>// Returns a new array with inArray's elements in reverse order
// (helper used by the linked-list exercises below).
function reverseArray(inArray) {
let newArray = [];
for (let cur of inArray) {
newArray.unshift(cur);
}
return newArray;
}
// Builds a linked list ({value, rest}) from an array, walking the
// array from the back so no reversal helper is needed.
function arrayToList(inArray) {
  let list = null;
  for (let i = inArray.length - 1; i >= 0; i--) {
    list = {value: inArray[i], rest: list};
  }
  return list;
}
// Converts a linked list back into an array.
function listToArray(list) {
  const result = [];
  let node = list;
  while (node) {
    result.push(node.value);
    node = node.rest;
  }
  return result;
}
// Returns a new list node holding newElement in front of list
// (the original list object is not modified).
function prepend(newElement, list) {
  return {value: newElement, rest: list};
}
// Returns the element at the given zero-based position in the list,
// or undefined when there is no such element.
// Improvement: the old version always traversed the entire list even
// after the element had been found; this one returns immediately.
function nth(list, element) {
  let index = 0;
  for (let node = list; node; node = node.rest) {
    if (index == element) {
      return node.value;
    }
    index++;
  }
  return undefined;
}
// Recursive variant: element 0 is the head's value; otherwise recurse
// on the tail with element - 1.
// Cleanup: the old version declared a `counter` that was always 0 and
// compared `counter == element`, which obscured that it was simply
// testing element == 0 at each level.
function nthRecursive(list, element) {
  if (list == null) {
    return undefined;
  }
  if (element == 0) {
    return list.value;
  }
  return nthRecursive(list.rest, element - 1);
}
// Demo calls with the book's expected results.
console.log(arrayToList([10, 20]));
// → {value: 10, rest: {value: 20, rest: null}}
console.log(listToArray(arrayToList([10, 20, 30])));
// → [10, 20, 30]
console.log(prepend(10, prepend(20, null)));
// → {value: 10, rest: {value: 20, rest: null}}
console.log(nth(arrayToList([10, 20, 30]), 1));
// → 20
console.log("fifth call");
console.log(nthRecursive(arrayToList([10, 20, 30]), 1));
// → 20
<file_sep>function countBs(inString) {
return countChar(inString,"B");
}
function countChar(inString,theChar) {
let bigChar = theChar.toUpperCase();
let littleChar = theChar.toLowerCase();
let numChars = 0;
let inStringLength = inString.length;
for (let counter = 0;counter<inStringLength;counter++) {
if (inString[counter] == littleChar || inString[counter] == bigChar) {
numChars++;
}
}
return numChars;
}
console.log(countBs("BBC"));
console.log(countChar("kakkerlak", "k"));
<file_sep>async function locateScalpel(nest) {
let scalpelLoc = nest;
// console.log(nest);
// loop through scalpel locations to the end (back to orig nest)
// this code gives all nests
//map goes through all neighbor nests
let lines = network(nest).map(async name => {
return name + ": " +
await anyStorage(nest, name, `scalpel`);
});
return (await Promise.all(lines)).join("\n");
// this code only gives first nest
let list = "";
await Promise.all(network(nest).map(async name => {
list += `${name}: ${
await anyStorage(nest, name, `scalpel`)
}\n`;
}));
return list;
}
function locateScalpel2(nest) {
// Your code here.
}
locateScalpel(bigOak).then(console.log);
// → Butcher Shop<file_sep>// Returns a promise that immediately resolves to the value 100.
function giveMeAPromise() {
  return new Promise(resolve => resolve(100));
}
giveMeAPromise().then(value => {
  console.log(value);
  // this should print 100
});
<file_sep>function deepEqual(val1,val2) {
// let theyAreEqual = false;
if (val1 == null && val2 == null) {
// console.log("case 1 both null");
return true;
}
else if (val1 == null || val2 == null) {
// console.log("case 2 one null");
return false;
}
else if (val1 === val2) {
// console.log("case 3 ===");
return true;
}
else { // objects must have same properties and values
// console.log("case 4 both objects");
if (Object.keys(val1).length == Object.keys(val2).length) {
keysOfVal1 = Object.keys(val1) ;
keysOfVal2 = Object.keys(val2) ;
keyLeng = keysOfVal1.length;
if (typeof(val1) == typeof(val2)) {
for (let i = 0; i < keyLeng; i++) {
// for (let i of keysOfVal1 )
if (keysOfVal1[i] == keysOfVal2[i]) {
let val1Key = Object.keys(val1)[i]; //the obj with the key of keysOfVal1[i]
let val2Key = Object.keys(val2)[i]; //the obj with the key of keysOfVal1[i]
return deepEqual(val1[val1Key], val2[val2Key]); // call with name of next level obj ??
}
} // end for
return true;
}
else {
return false;
}
} // if length == length
else {
return false;
}
} // end else we have objects
}
// actual tests
let obj = {here: {is: "an"}, object: 2};
console.log("actual test 1: sb true");
console.log(deepEqual(obj, obj));
// → true
console.log("actual test 2: sb false");
console.log(deepEqual(obj, {here: 1, object: 2}));
// → false
console.log("actual test 3: sb true");
console.log(deepEqual(obj, {here: {is: "an"}, object: 2}));
// → true
<file_sep>function getPromiseOne() {
return Promise.resolve(30);
}
function getPromiseTwo() {
return Promise.resolve(50);
}
//add the two values from the promises and print the sum, 80
Promise.all([getPromiseOne(), getPromiseTwo()]).then(function(values){
console.log(values[0] + values[1]);
})
<file_sep>function deepEqual(val1,val2) {
if (val1 == null && val2 == null) {
// console.log("case 1 both null");
return true;
}
else if (val1 == null || val2 == null) {
// console.log("case 2 one null");
return false;
}
else if (val1 === val2) {
// console.log("case 3 ===");
return true;
}
else { // objects must have same properties and values
if(JSON.stringify(val1) == JSON.stringify(val2)) {
return true;
}
else {
return false;
}
} // end else we have objects
}
// Smoke tests for deepEqual: expect true, false, true.
const probe = {here: {is: "an"}, object: 2};
console.log(deepEqual(probe, probe));
console.log(deepEqual(probe, {here: 1, object: 2}));
// → false
console.log(deepEqual(probe, {here: {is: "an"}, object: 2}));
// → true
<file_sep>// Write a higher-order function loop that provides something like a for loop
//statement. It takes a value, a test function, an update function, and a body
//function. Each iteration, it first runs the test function on the current loop
// value, and stops if that returns false. Then it calls the body function,
// giving it the current value. And finally, it calls the update function to
//create a new value, and starts from the beginning.
//When defining the function, you can use a regular loop to do the actual looping
// loop(3, n => n > 0, n => n - 1, console.log);
// → 3
// → 2
// → 1
// Higher-order for-loop: while test(value) holds, call body(value), then
// advance with update(value).
// Fixes: removed leftover debug console.log calls that polluted the
// output with the current value and the test function's source, and
// replaced unbounded recursion with the regular loop the exercise
// suggests (avoids stack growth for large iteration counts).
function loop(value, test, update, body)
{
  let current = value;
  while (test(current)) {
    body(current);
    current = update(current);
  }
}
loop(3, n => n > 0, n => n - 1, console.log);
|
121c5fe4b7748417674f1f0d674a5ac13e70bdb0
|
[
"JavaScript"
] | 17 |
JavaScript
|
Scubamama/HoursWExperts
|
430165e0382ef715d600505f9655dc6c323433bf
|
bd8d895a1b5b6ba65f1bc4f657fb84e10e20e0d6
|
refs/heads/main
|
<repo_name>LilWingXYZ/Cryptography-Experiment<file_sep>/实验题目/实验6 - 数字签名/Sign/sign.cpp
#include <string.h>
#include <stdlib.h>
#include "R_STDLIB.C"
#include "R_RANDOM.C"
#include "NN.C"
#include "RSA.C"
#include "DIGIT.C"
#include "MD5C.C"
#include "PRIME.C"
#include "R_KEYGEN.C"
#include "DESC.C"
#include "MD2C.C"
#include "R_ENCODE.C"
#include "R_ENHANC.C"
#include <stdio.h>
// Print `msg`, then `len` bytes of `output` rendered as hex digits,
// terminated by a newline.
void shows (char *msg, unsigned char *output, unsigned int len)
{
    printf ("%s: ", msg);
    unsigned int i;
    for (i = 0; i < len; ++i)
        printf ("%x", output[i]);
    putchar ('\n');
}
// Seed the RSAREF PRNG in `randomStruct` by feeding it the byte string
// `seed` repeatedly until the PRNG reports it needs no more entropy.
void seed_randomStruct (unsigned char *seed, R_RANDOM_STRUCT *randomStruct)
{
    unsigned int remaining = 256;  /* seed bytes still required */

    R_RandomInit (randomStruct);
    do {
        R_RandomUpdate (randomStruct, seed, strlen ((char *) seed));
        R_GetRandomBytesNeeded (&remaining, randomStruct);
    } while (remaining > 0);
}
// Generate a PEM-style RSA key pair.
// modul_bits: modulus length in bits; useFermat4: nonzero selects e = 65537.
// Exits the process on failure.
// Fix: the error paths called exit(0), which reports SUCCESS to the
// shell; they now exit with a nonzero status.
void create_RSAkey (R_RSA_PUBLIC_KEY *publicKey, R_RSA_PRIVATE_KEY *privateKey, unsigned int modul_bits,
int useFermat4, R_RANDOM_STRUCT *randomStruct)
{
    R_RSA_PROTO_KEY protoKey;
    int flag;

    protoKey.bits = modul_bits;        /* modulus length */
    protoKey.useFermat4 = useFermat4;  /* public exponent selection */
    flag = R_GeneratePEMKeys
        (publicKey, privateKey, &protoKey, randomStruct);  /* generate the key pair */
    if (RE_MODULUS_LEN == flag)
    { printf ("modulus length invalid\n"); exit(1); }
    else if (RE_NEED_RANDOM == flag)
    { printf ("randomStruct is not seeded\n"); exit(1); }
}
// Demo: sign file1.txt with an RSA private key over an MD5 digest, then
// verify the signature against file2.txt with the matching public key.
// Prints "success" only when file2.txt matches the signed content.
// Fix: failure paths returned 0 (success) to the shell; they now return 1.
int main(int argc, char* argv[])
{
    R_RANDOM_STRUCT randomStruct;
    R_RSA_PUBLIC_KEY publicKey;
    R_RSA_PRIVATE_KEY privateKey;
    R_SIGNATURE_CTX Scontext, Vcontext;
    unsigned char seed[] = "asdfsafsafs2341131231";
    unsigned char signature[MAX_ENCRYPTED_KEY_LEN];
    unsigned int signatureLen;
    FILE *fp;
    unsigned char line[1000];

    /* Seed the PRNG and generate the RSA key pair. */
    seed_randomStruct (seed, &randomStruct);
    create_RSAkey
        (&publicKey, &privateKey, 1024, 1, &randomStruct);

    /* Sign file1.txt, hashing it line by line. */
    if (NULL == (fp = fopen("file1.txt", "r")))
    { printf("open file1 error\n"); return 1; }
    if (RE_DIGEST_ALGORITHM == R_SignInit (&Scontext, DA_MD5))
    { printf ("digestAlgorithm is invalid\n"); return 1; }
    while (fgets((char *)line, 1000, fp))
        R_SignUpdate (&Scontext, line, strlen((char *)line));
    fclose (fp);
    if (RE_PRIVATE_KEY == R_SignFinal
        (&Scontext, signature, &signatureLen, &privateKey))
    {
        printf ("privateKey cannot encrypt message digest\n");
        return 1;
    }
    shows("signature", signature, signatureLen);

    /* Verify the signature against file2.txt. */
    if (NULL == (fp = fopen("file2.txt", "r")))
    { printf("open file2 error\n"); return 1; }
    if (RE_DIGEST_ALGORITHM == R_VerifyInit (&Vcontext, DA_MD5))
    { printf ("digestAlgorithm is invalid\n"); return 1; }
    while (fgets((char *)line, 1000, fp))
        R_VerifyUpdate
            (&Vcontext, line, strlen((char *)line));
    fclose (fp);
    int ret = R_VerifyFinal
        (&Vcontext, signature, signatureLen, &publicKey);
    printf ("verify result: ");
    switch (ret)
    {
    case 0:
        printf("success\n"); break;
    case RE_SIGNATURE:
        printf("signature is incorrect\n"); break;
    case RE_LEN:
        printf("signatureLen out of range\n"); break;
    case RE_PUBLIC_KEY:
        printf("publicKey cannot decrypt signature\n"); break;
    }
    R_RandomFinal (&randomStruct);
    return 0;
}
<file_sep>/实验题目/实验5 - 数字信封/DES_CBC_RSA/1.cpp
#include <string.h>
#include <stdlib.h>
#include "R_STDLIB.C"
#include "R_RANDOM.C"
#include "NN.C"
#include "RSA.C"
#include "DIGIT.C"
#include "MD5C.C"
#include "PRIME.C"
#include "R_KEYGEN.C"
#include <stdio.h>
/*typedef struct PackageCiphertext //(没有)
{
unsigned char output[TEXT_LEN]; //密文
unsigned int outputlen; //密文长度
unsigned char sealedkey[MAX_ENCRYPTED_KEY_LEN]; //封装后的会话密钥
unsigned int sealedkeylen; //封装后的会话密钥长度
}PackageCiphertext;*/
// 以十六进制形式显示output中的内容
// Display `len` bytes of `output` in hexadecimal under the label
// "ciphertext".
void shows (unsigned char *output, unsigned int len)
{
    unsigned int i;

    printf ("ciphertext: ");
    for (i = 0; i != len; ++i)
        printf ("%x", output[i]);
    printf ("\n");
}
//填充随机数结构体 (正确)
// Seed the RSAREF PRNG in `randomStruct` with the bytes of `seed`,
// repeating until no further seed material is required.
void seed_randomStruct (unsigned char *seed, R_RANDOM_STRUCT *randomStruct)
{
    unsigned int needed = 256;  /* seed bytes still outstanding */

    R_RandomInit (randomStruct);
    for (;;)
    {
        if (needed == 0)
            break;
        R_RandomUpdate (randomStruct, seed, strlen ((char *) seed));
        R_GetRandomBytesNeeded (&needed, randomStruct);
    }
}
#define PLAINTEXT_LEN 16 //明文长度
// Demo: RSA key generation, then encrypt a 16-byte plaintext with the
// public key and decrypt it with the private key.
// Fix: the key-generation error paths called exit(0), which reports
// success to the shell; they now exit with status 1.
int main(int argc, char* argv[])
{
    unsigned char seed[] = "3adqwe1212asd";                   /* PRNG seed */
    unsigned char input[PLAINTEXT_LEN+1] = "12345678abcdefgh"; /* plaintext */
    unsigned char output[MAX_ENCRYPTED_KEY_LEN] = "";
    unsigned char output2[PLAINTEXT_LEN+1] = "";
    unsigned int outputlen, outputlen2;
    int flag;
    R_RSA_PUBLIC_KEY publicKey;
    R_RSA_PRIVATE_KEY privateKey;
    R_RSA_PROTO_KEY protoKey;
    R_RANDOM_STRUCT randomStruct;

    protoKey.bits = 1024;     /* modulus length = 1024 bits */
    protoKey.useFermat4 = 1;  /* e = 65537 */
    seed_randomStruct (seed, &randomStruct);  /* seed the PRNG */
    flag = R_GeneratePEMKeys
        (&publicKey, &privateKey, &protoKey, &randomStruct);  /* generate RSA keys */
    if (RE_MODULUS_LEN == flag)
    { printf ("modulus length invalid\n"); exit(1); }
    else if (RE_NEED_RANDOM == flag)
    { printf ("randomStruct is not seeded\n"); exit(1); }

    /* Show the plaintext. */
    printf ("plaintext: %s\n", input);
    /* Encrypt with the public key. */
    RSAPublicEncrypt (output, &outputlen, input, strlen((char *)input),
        &publicKey, &randomStruct);
    /* Show the ciphertext. */
    shows(output, outputlen);
    /* Decrypt with the private key. */
    RSAPrivateDecrypt (output2, &outputlen2, output, outputlen,
        &privateKey);
    printf("decrypted ciphertext: %s\n", output2);
    R_RandomFinal (&randomStruct);
    return 0;
}
<file_sep>/实验完成代码/实验7 - DH/R_DH.C
/* R_DH.C - Diffie-Hellman routines for RSAREF
*/
/* Copyright (C) RSA Laboratories, a division of RSA Data Security,
Inc., created 1993. All rights reserved.
*/
#include "global.h"
#include "rsaref.h"
#include "r_random.h"
#include "nn.h"
#include "prime.h"
/* Generates Diffie-Hellman parameters.
*/
// Generates DH parameters: a primeBits-bit prime p, a subPrimeBits-bit
// subprime q with q | p-1, and a generator g of the order-q subgroup.
// Outputs go into params->prime / params->generator (length primeLen).
// Returns 0 on success, or the nonzero status from GeneratePrime.
int R_GenerateDHParams (R_DH_PARAMS *params, unsigned int primeBits, unsigned int subPrimeBits, R_RANDOM_STRUCT *randomStruct)
{
int status;
NN_DIGIT g[MAX_NN_DIGITS], p[MAX_NN_DIGITS], q[MAX_NN_DIGITS],
t[MAX_NN_DIGITS], u[MAX_NN_DIGITS], v[MAX_NN_DIGITS];
unsigned int pDigits;
// Number of NN digits needed to hold a primeBits-bit value (round up).
pDigits = (primeBits + NN_DIGIT_BITS - 1) / NN_DIGIT_BITS;
/* Generate subprime q between 2^(subPrimeBits-1) and
2^subPrimeBits-1, searching in steps of 2.
*/
// t = lower bound, u = upper bound, v = step for the prime search.
NN_Assign2Exp (t, subPrimeBits-1, pDigits);
NN_Assign (u, t, pDigits);
NN_ASSIGN_DIGIT (v, 1, pDigits);
NN_Sub (v, t, v, pDigits);
NN_Add (u, u, v, pDigits);
NN_ASSIGN_DIGIT (v, 2, pDigits);
if (status = GeneratePrime (q, t, u, v, pDigits, randomStruct))
return (status);
/* Generate prime p between 2^(primeBits-1) and 2^primeBits-1,
searching in steps of 2*q.
*/
// Stepping by 2*q keeps q | p-1 for every candidate p.
NN_Assign2Exp (t, primeBits-1, pDigits);
NN_Assign (u, t, pDigits);
NN_ASSIGN_DIGIT (v, 1, pDigits);
NN_Sub (v, t, v, pDigits);
NN_Add (u, u, v, pDigits);
NN_LShift (v, q, 1, pDigits);
if (status = GeneratePrime (p, t, u, v, pDigits, randomStruct))
return (status);
/* Generate generator g for subgroup as 2^((p-1)/q) mod p.
*/
NN_ASSIGN_DIGIT (g, 2, pDigits);
NN_Div (t, u, p, pDigits, q, pDigits);
NN_ModExp (g, g, t, pDigits, p, pDigits);
// Encode the results into the caller-supplied byte buffers.
params->generatorLen = params->primeLen = DH_PRIME_LEN (primeBits);
NN_Encode (params->prime, params->primeLen, p, pDigits);
NN_Encode (params->generator, params->generatorLen, g, pDigits);
return (0);
}
/* Sets up Diffie-Hellman key agreement. Public value has same length
as prime.
*/
int R_SetupDHAgreement
(unsigned char *publicValue, unsigned char *privateValue, unsigned int privateValueLen, R_DH_PARAMS *params, R_RANDOM_STRUCT *randomStruct)
{
// Picks a random private value x of privateValueLen bytes and computes
// the public value y = g^x mod p; returns 0 on success or the status
// from R_GenerateBytes.
int status;
NN_DIGIT g[MAX_NN_DIGITS], p[MAX_NN_DIGITS], x[MAX_NN_DIGITS],
y[MAX_NN_DIGITS];
unsigned int pDigits, xDigits;
// Decode the prime and generator from the params byte buffers.
NN_Decode (p, MAX_NN_DIGITS, params->prime, params->primeLen);
pDigits = NN_Digits (p, MAX_NN_DIGITS);
NN_Decode (g, pDigits, params->generator, params->generatorLen);
/* Generate private value.
*/
if (status = R_GenerateBytes (privateValue, privateValueLen, randomStruct))
return (status);
NN_Decode (x, pDigits, privateValue, privateValueLen);
xDigits = NN_Digits (x, pDigits);
/* Compute y = g^x mod p.
*/
NN_ModExp (y, g, x, xDigits, p, pDigits);
// Public value is encoded at full prime length.
NN_Encode (publicValue, params->primeLen, y, pDigits);
/* Zeroize sensitive information.
*/
R_memset ((POINTER)x, 0, sizeof (x));
return (0);
}
/* Computes agreed key from the other party's public value, a private
value, and Diffie-Hellman parameters. Other public value and
agreed-upon key have same length as prime.
Requires otherPublicValue < prime.
*/
int R_ComputeDHAgreedKey
(unsigned char *agreedKey, unsigned char *otherPublicValue, unsigned char *privateValue, unsigned int privateValueLen, R_DH_PARAMS *params)
{
// Computes the shared secret z = y^x mod p from the peer's public value
// y and our private value x. Returns RE_DATA if y >= p, else 0.
NN_DIGIT p[MAX_NN_DIGITS], x[MAX_NN_DIGITS], y[MAX_NN_DIGITS],
z[MAX_NN_DIGITS];
unsigned int pDigits, xDigits;
NN_Decode (p, MAX_NN_DIGITS, params->prime, params->primeLen);
pDigits = NN_Digits (p, MAX_NN_DIGITS);
NN_Decode (x, pDigits, privateValue, privateValueLen);
xDigits = NN_Digits (x, pDigits);
NN_Decode (y, pDigits, otherPublicValue, params->primeLen);
// Reject out-of-range peer values (requires otherPublicValue < prime).
if (NN_Cmp (y, p, pDigits) >= 0)
return (RE_DATA);
/* Compute z = y^x mod p.
*/
NN_ModExp (z, y, x, xDigits, p, pDigits);
NN_Encode (agreedKey, params->primeLen, z, pDigits);
/* Zeroize sensitive information.
*/
R_memset ((POINTER)x, 0, sizeof (x));
R_memset ((POINTER)z, 0, sizeof (z));
return (0);
}
<file_sep>/实验题目/ConsoleApplication3/ConsoleApplication3/7.cpp
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "NN.C"
#include "RSA.C"
#include "DIGIT.C"
#include "MD2C.C"
#include "MD5C.C"
#include "DESC.C"
#include "PRIME.C"
#include "R_STDLIB.C"
#include "R_RANDOM.C"
#include "R_KEYGEN.C"
#include "R_ENHANC.C"
#include "R_ENCODE.C"
#include "R_DH.C"
// 以十六进制形式显示output中的内容
// Print `text`, then `len` bytes of `output` as hex, ending with a newline.
void shows(char *text, unsigned char *output, unsigned int len){
    printf("%s: ", text);
    unsigned int idx = 0;
    while (idx < len) {
        printf("%x", output[idx]);
        ++idx;
    }
    printf("\n");
}
//给R_DH_PARAMS的成员分配内存空间
// Allocate the prime and generator buffers of `params`, each sized for a
// `bits`-bit prime. Pair with Destory_DH_Params to release them.
void Init_DH_Params(R_DH_PARAMS *params, unsigned int bits){
    const unsigned int buflen = DH_PRIME_LEN(bits);
    params->prime = new unsigned char[buflen];
    params->generator = new unsigned char[buflen];
}
//销毁分配给R_DH_PARAMS的成员的内存空间
// Release the buffers allocated by Init_DH_Params.
// (The name's spelling matches existing callers and is kept.)
void Destory_DH_Params(R_DH_PARAMS *params){
    delete[] params->generator;
    delete[] params->prime;
}
// 产生DH系统参数
// Generate DH system parameters (prime, subprime, generator) into *params.
// Returns 1 on success; prints a diagnostic and returns 0 on failure.
// NOTE(review): the buffers allocated by Init_DH_Params are not freed on
// the failure paths; callers currently exit on failure, so this only
// matters if the function is reused elsewhere.
int create_DH_params(R_DH_PARAMS *params, int prime_len, int subprime_len, R_RANDOM_STRUCT *randomStruct){
// Allocate space for the DH parameter members.
Init_DH_Params(params, prime_len);
// Generate the DH system parameters.
int ret = R_GenerateDHParams(params, prime_len, subprime_len, randomStruct);
if (RE_MODULUS_LEN == ret){
printf("prime length invalid \n");
return 0;
}
else if (RE_NEED_RANDOM == ret){
printf("randomStruct is not seeded \n");
return 0;
}
else if (RE_DATA == ret){
printf("prime bits out of range\n");
return 0;
}
return 1;
}
// 产生RSA密钥
// Generate a PEM RSA key pair for `user`.
// modul_bits selects the modulus length; useFermat4 nonzero selects e = 65537.
// Returns 1 on success; prints a per-user diagnostic and returns 0 on failure.
int create_RSAkey(char *user, R_RSA_PUBLIC_KEY *publicKey, R_RSA_PRIVATE_KEY *privateKey, unsigned int modul_bits,int useFermat4, R_RANDOM_STRUCT *randomStruct){
    R_RSA_PROTO_KEY protoKey;
    protoKey.bits = modul_bits;        // modulus length in bits
    protoKey.useFermat4 = useFermat4;  // public exponent selection
    const int status = R_GeneratePEMKeys(publicKey, privateKey, &protoKey, randomStruct);
    switch (status) {
    case RE_MODULUS_LEN:
        printf("%s: modulus length invalid\n", user);
        return 0;
    case RE_NEED_RANDOM:
        printf("%s: randomStruct is not seeded\n", user);
        return 0;
    default:
        return 1;
    }
}
// 产生用户的公开值、秘密值
// Generate this user's DH private value and matching public value.
// Returns 1 on success; prints a diagnostic and returns 0 when the
// random structure has not been seeded.
int setup_DH_agreement(char *user, unsigned char *publicValue, unsigned char *privateValue,int privateValueLEN, R_DH_PARAMS *params,R_RANDOM_STRUCT *randomStruct){
    const int status = R_SetupDHAgreement(publicValue, privateValue, privateValueLEN, params, randomStruct);
    if (status == RE_NEED_RANDOM) {
        printf("%s: randomStruct is not seeded \n", user);
        return 0;
    }
    return 1;
}
// 产生共享密钥
// Derive the shared key from the peer's public value and our private value.
// Returns 0 when the peer's public value is out of range (RE_DATA), 1 otherwise.
int create_agreementkey(unsigned char *Key, unsigned char *otherspublicValue, unsigned char *privateValue, int publicValueLen,R_DH_PARAMS *params){
    const int status = R_ComputeDHAgreedKey(Key, otherspublicValue, privateValue, publicValueLen, params);
    return (status == RE_DATA) ? 0 : 1;
}
// 对公开值计算签名
// Sign `publicValue` (publicValueLen bytes) with RSA over an MD5 digest.
// On success fills `signature`/`signatureLen` and returns 1; prints a
// per-user diagnostic and returns 0 on failure.
int sign(char *user, unsigned char *publicValue, int publicValueLen,unsigned char *signature, unsigned int *signatureLen,R_RSA_PRIVATE_KEY *privateKey){
R_SIGNATURE_CTX context;
if (RE_DIGEST_ALGORITHM == R_SignInit(&context, DA_MD5)){
printf("%s: digestAlgorithm is invalid\n", user);
return 0;
}
// Feed the data to be signed into the digest.
R_SignUpdate(&context, publicValue, publicValueLen);
if (RE_PRIVATE_KEY == R_SignFinal(&context, signature, signatureLen, privateKey)){
printf("%s: privateKey cannot encrypt message digest\n", user);
return 0;
}
return 1;
}
// 对收到的公开值和签名进行校验
// Verify the RSA/MD5 `signature` over the received `publicValue`.
// Returns 1 when the signature checks out; prints the specific failure
// reason and returns 0 otherwise.
int verify(char *user, unsigned char *publicValue, int publicValueLen,unsigned char *signature, unsigned int signatureLen,R_RSA_PUBLIC_KEY *publicKey){
R_SIGNATURE_CTX context;
if (RE_DIGEST_ALGORITHM == R_VerifyInit(&context, DA_MD5)){
printf("digestAlgorithm is invalid\n");
return 0;
}
// Digest the data whose signature is being checked.
R_VerifyUpdate(&context, publicValue, publicValueLen);
int ret = R_VerifyFinal(&context, signature, signatureLen, publicKey);
switch (ret){
case 0: printf("%s: verify success\n", user);
return 1;
case RE_SIGNATURE: printf("%s: signature is incorrect\n", user);
return 0;
case RE_LEN: printf("%s: signatureLen out of range\n", user);
return 0;
case RE_PUBLIC_KEY: printf("%s: publicKey cannot decrypt signature\n", user);
return 0;
}
return 0;
}
#define PRIME_BITS 512
#define SUBPRIME_BITS (PRIME_BITS-10)
#define PRIVATE_VALUE_LEN DH_PRIME_LEN(SUBPRIME_BITS-1)
#define PUBLIC_VALUE_LEN DH_PRIME_LEN(PRIME_BITS)
#define LEN 16
int main(){
R_DH_PARAMS params;
unsigned char seed[] = "asdfsafsafs2341131231";
R_RANDOM_STRUCT randomStruct;
unsigned char Alice_publicValue[PUBLIC_VALUE_LEN],Bob_publicValue[PUBLIC_VALUE_LEN];
unsigned char Alice_privateValue[PRIVATE_VALUE_LEN],Bob_privateValue[PRIVATE_VALUE_LEN];
unsigned char Alice_Key[PUBLIC_VALUE_LEN],Bob_Key[PUBLIC_VALUE_LEN];
unsigned char Alice_signature[MAX_ENCRYPTED_KEY_LEN],Bob_signature[MAX_ENCRYPTED_KEY_LEN];
unsigned int Alice_signLen, Bob_signLen;
R_RSA_PUBLIC_KEY Alice_publicKey, Bob_publicKey;
R_RSA_PRIVATE_KEY Alice_privateKey, Bob_privateKey;
DES_CBC_CTX context;
unsigned char input[LEN + 1] = "12345678abcdefgh";
unsigned char output[LEN];
unsigned char output2[LEN + 1] = "";
unsigned char iv[8 + 1] = "1asdf243";
unsigned int bytesNeeded = 256; //结构体所需种子长度
R_RandomInit(&randomStruct);
while (bytesNeeded > 0){
R_RandomUpdate(&randomStruct, seed,strlen((char *)seed));
R_GetRandomBytesNeeded(&bytesNeeded,&randomStruct);
}
printf("create DH params...\n");
if (!create_DH_params(¶ms, PRIME_BITS,SUBPRIME_BITS, &randomStruct))
return 1;
printf("create RSA key...\n");
// 分别产生Alice、Bob的RSA密钥
if (!create_RSAkey("Alice", &Alice_publicKey,&Alice_privateKey, 1024, 1, &randomStruct))
return 0;
if (!create_RSAkey("Bob", &Bob_publicKey,&Bob_privateKey, 1024, 1, &randomStruct))
return 0;
printf("setup DH agreement...\n");
// 分别产生Alice、Bob的DH公开值、秘密值
if (!setup_DH_agreement("Alice", Alice_publicValue, Alice_privateValue,PRIVATE_VALUE_LEN, ¶ms, &randomStruct))
return 0;
if (!setup_DH_agreement("Bob", Bob_publicValue, Bob_privateValue,PRIVATE_VALUE_LEN, ¶ms, &randomStruct))
return 0;
printf("sign DH public value...\n");
//分别对自己的公开值进行签名
if (!sign("Alice", Alice_publicValue,PUBLIC_VALUE_LEN, Alice_signature,&Alice_signLen, &Alice_privateKey))
return 0;
if (!sign("Bob", Bob_publicValue,PUBLIC_VALUE_LEN, Bob_signature,&Bob_signLen, &Bob_privateKey))
return 0;
printf("\nsending to each other.\n\n");
printf("verify DH public value...\n");
// 分别校验收到的公开值和签名的有效性(是否在传输时被篡改)
if (!verify("Alice", Bob_publicValue,PUBLIC_VALUE_LEN, Bob_signature,Bob_signLen, &Bob_publicKey))
return 0;
if (!verify("Bob", Alice_publicValue,PUBLIC_VALUE_LEN, Alice_signature,Alice_signLen, &Alice_publicKey))
return 0;
printf("create agreement key...\n");
//分别计算Alice、Bob的共享密钥
if (!create_agreementkey(Alice_Key, Bob_publicValue, Alice_privateValue, PRIVATE_VALUE_LEN, ¶ms)){
printf("Bob's public value out of range \n");
return 0;
}
if (!create_agreementkey(Bob_Key, Alice_publicValue, Bob_privateValue, PRIVATE_VALUE_LEN, ¶ms)){
printf("Alice's public value out of range \n");
return 0;
}
shows("Alice's key", Alice_Key, DH_PRIME_LEN(PRIME_BITS));
shows("Bob's key", Bob_Key, DH_PRIME_LEN(PRIME_BITS));
printf("Alice's plaintext: %s\n", input);
printf("Alice encrypts plaintext with Alice_Key...\n");
// Alice加密明文
DES_CBCInit(&context, Alice_Key, iv, 1);
DES_CBCUpdate(&context, output, input, LEN);
shows("Alice creates ciphertext with Alice_Key", output, LEN);
printf("\nAlice sends ciphertext to Bob.\n\n");
printf("Bob decrypts ciphertex with Bob_Key...\n");
// Bob解密密文
DES_CBCInit(&context, Bob_Key, iv, 0);
DES_CBCUpdate(&context, output2, output, LEN);
printf("decrypted ciphertext: %s\n", output2);
Destory_DH_Params(¶ms);
R_RandomFinal(&randomStruct);
return 0;
}
<file_sep>/README.md
# Cryptography-Experiment
<file_sep>/实验题目/实验5 - 数字信封/DES_CBC_RSA/des_cbc_rsa.cpp
#include <string.h>
#include <stdlib.h>
#include "R_STDLIB.C"
#include "R_RANDOM.C"
#include "NN.C"
#include "RSA.C"
#include "DIGIT.C"
#include "MD5C.C"
#include "PRIME.C"
#include "R_KEYGEN.C"
#include "DESC.C"
#include <stdio.h>
#define TEXT_LEN 16 //明密文长度
// Digital envelope: the DES-encrypted message together with the
// RSA-wrapped (sealed) session key used to encrypt it.
struct SealCipherText
{
unsigned char output[TEXT_LEN]; // ciphertext
unsigned int outputlen; // ciphertext length
unsigned char sealedkey[MAX_ENCRYPTED_KEY_LEN]; // RSA-encrypted session key
unsigned int sealedkeylen; // length of the sealed session key
};
// 以十六进制形式显示output中的内容
// Print `msg` followed by `len` bytes of `output` rendered as hex digits.
void shows (char *msg, unsigned char *output, unsigned int len)
{
    printf ("%s: ", msg);
    for (unsigned int pos = 0; pos < len; pos++)
    {
        printf ("%x", output[pos]);
    }
    printf ("\n");
}
//填充随机数结构体
// Seed the RSAREF PRNG in `randomStruct` by feeding it `seed` repeatedly
// until the PRNG reports that it needs no more entropy.
void seed_randomStruct (unsigned char *seed, R_RANDOM_STRUCT *randomStruct)
{
unsigned int bytesNeeded = 256; // seed bytes the structure still requires
R_RandomInit (randomStruct);
while (bytesNeeded > 0)
{
R_RandomUpdate (randomStruct, seed,
strlen((char *)seed));
R_GetRandomBytesNeeded (&bytesNeeded, randomStruct);
}
}
//产生RSA密钥
// Generate a PEM-style RSA key pair for the digital-envelope demo.
// modul_bits: modulus length in bits; useFermat4: nonzero selects e = 65537.
// Fix: the error paths called exit(0), reporting success to the shell;
// they now exit with a nonzero status.
void create_RSAkey (R_RSA_PUBLIC_KEY *publicKey, R_RSA_PRIVATE_KEY *privateKey, unsigned int modul_bits,
int useFermat4, R_RANDOM_STRUCT *randomStruct)
{
    R_RSA_PROTO_KEY protoKey;
    int flag;

    protoKey.bits = modul_bits;        /* modulus length */
    protoKey.useFermat4 = useFermat4;  /* public exponent selection */
    flag = R_GeneratePEMKeys
        (publicKey, privateKey, &protoKey, randomStruct);  /* generate the key pair */
    if (RE_MODULUS_LEN == flag)
    { printf ("modulus length invalid\n"); exit(1); }
    else if (RE_NEED_RANDOM == flag)
    { printf ("randomStruct is not seeded\n"); exit(1); }
}
//数字信封封装(加密)
// Digital-envelope seal (encrypt): generate a random 8-byte DES session
// key, encrypt `input` with it in CBC mode, and wrap the session key
// with the recipient's RSA public key. Results go into *sealedtext.
// NOTE(review): assumes inputlen is a multiple of the DES block size —
// confirm against callers.
void SealEnc (SealCipherText *sealedtext,
unsigned char *input, int inputlen, R_RSA_PUBLIC_KEY *publicKey,
unsigned char iv[8], R_RANDOM_STRUCT *randomStruct)
{
unsigned char key[8] = ""; // symmetric session key
DES_CBC_CTX context;
R_GenerateBytes (key, 8, randomStruct); // draw a random session key
// Encrypt the plaintext with DES-CBC under the session key.
DES_CBCInit(&context, key, iv, 1);
DES_CBCUpdate(&context, sealedtext->output, input, inputlen);
sealedtext->outputlen = inputlen;
// Wrap (RSA-encrypt) the session key.
RSAPublicEncrypt (sealedtext->sealedkey, &(sealedtext->sealedkeylen), key, 8, publicKey, randomStruct);
}
//数字信封解封(解密)
// Digital-envelope open (decrypt): recover the DES session key with the
// RSA private key, then decrypt the ciphertext into `output2`.
void SealDec (unsigned char *output2, SealCipherText *sealedtext,
R_RSA_PRIVATE_KEY *privateKey, unsigned char iv[8])
{
DES_CBC_CTX context;
unsigned char key[8];
unsigned int keylen;
// Recover the session key.
RSAPrivateDecrypt (key, &keylen, sealedtext->sealedkey, sealedtext->sealedkeylen, privateKey);
// Decrypt the ciphertext.
DES_CBCInit (&context, key, iv, 0);
DES_CBCUpdate(&context, output2, sealedtext->output, sealedtext->outputlen);
}
// Demo: seal a 16-byte message into a digital envelope with the public
// key, display the ciphertext and wrapped session key, then open the
// envelope with the private key and show the recovered plaintext.
int main(int argc, char* argv[])
{
unsigned char seed[] = "3adqwe1212asd"; // PRNG seed
unsigned char iv[8+1] = "13wedfgr"; // IV
unsigned char input[TEXT_LEN+1] = "12345678abcdefgh"; // plaintext
unsigned char output2[TEXT_LEN+1] = ""; // recovered plaintext
SealCipherText sealedtext;
R_RSA_PUBLIC_KEY publicKey;
R_RSA_PRIVATE_KEY privateKey;
R_RANDOM_STRUCT randomStruct;
// Show the plaintext.
printf ("plaintext: %s\n", input);
seed_randomStruct (seed, &randomStruct); // seed the PRNG
create_RSAkey (&publicKey, &privateKey, 1024, 1, &randomStruct); // generate the RSA key pair
// Seal the envelope (encrypt).
SealEnc (&sealedtext, input, TEXT_LEN, &publicKey, iv, &randomStruct);
// Show the ciphertext and the wrapped session key.
shows("ciphertext", sealedtext.output, TEXT_LEN);
shows("sealed key", sealedtext.sealedkey, sealedtext.sealedkeylen);
// Open the envelope (decrypt).
SealDec (output2, &sealedtext, &privateKey, iv);
// Show the recovered plaintext.
printf("decrypted ciphertext: %s\n", output2);
R_RandomFinal (&randomStruct);
return 0;
}
<file_sep>/实验题目/实验3 - 基于口令的加密/PBE/pbe.cpp
//#include "stdafx.h"
#include <string.h>
#include "R_STDLIB.C"
#include "R_RANDOM.C"
#include "MD5C.C"
#include "DESC.C"
#include <stdio.h>
//以十六进制形式显示output中的内容
// Dump `len` bytes of `output` in hex under the label "ciphertext".
void shows (unsigned char *output, int len)
{
    int i = 0;

    printf ("ciphertext: ");
    while (i < len)
    {
        printf ("%x", output[i]);
        i++;
    }
    printf ("\n");
}
//将output中的内容复制到input中
// Copy the first `len` bytes of `output` into `input`.
// Uses memcpy (string.h is already included by this file) instead of the
// original byte-by-byte loop; non-positive lengths are a no-op, matching
// the original behavior.
void copys (unsigned char *output, unsigned char *input, int len)
{
    if (len > 0)
        memcpy (input, output, (size_t) len);
}
//产生salt
// Generate `saltlen` random bytes into `salt`, seeding a local RSAREF
// PRNG from the byte string `seed`.
// Fix: feed the PRNG before querying how many seed bytes remain, matching
// the seed_randomStruct loops used elsewhere in this project; the
// original queried first and then tested a stale count, running one
// extra iteration after seeding had already completed.
void create_salt (unsigned char *salt, int saltlen, unsigned char *seed)
{
    R_RANDOM_STRUCT randomStruct;    /* PRNG state */
    unsigned int bytesNeeded = 256;  /* seed bytes still required */

    R_RandomInit (&randomStruct);
    while (bytesNeeded > 0)
    {
        R_RandomUpdate (&randomStruct, seed, strlen ((char *) seed));
        R_GetRandomBytesNeeded (&bytesNeeded, &randomStruct);
    }
    R_GenerateBytes (salt, saltlen, &randomStruct);
    R_RandomFinal (&randomStruct);
}
//用MD5混合口令和salt
// Mix `password` and `salt` into a 16-byte MD5 digest stored in `result`
// (digest = MD5(password || salt)), used as key material for PBE.
void MixPwdSalt (unsigned char *password,
unsigned char *salt, unsigned char result[16])
{
MD5_CTX context;
MD5Init (&context);
MD5Update (&context, password,
strlen((char *)password));
MD5Update (&context, salt, strlen((char *)salt));
MD5Final (result, &context);
}
//用DES加解密
// Round-trip demo: show `input`, DES-CBC-encrypt it into `output` with
// `key`/`iv`, display the ciphertext, then decrypt into `output2` and
// display the recovered plaintext.
void DES_deal (unsigned char *input, unsigned char *output,
unsigned char *output2, int len,
unsigned char key[8], unsigned char *iv)
{
DES_CBC_CTX context;
// Show the plaintext.
printf ("plaintext: %s\n", input);
// Encrypt.
DES_CBCInit(&context, key, iv, 1);
DES_CBCUpdate(&context, output, input, len);
// Show the ciphertext.
shows (output, len);
// Decrypt.
DES_CBCInit (&context, key, iv, 0);
DES_CBCUpdate(&context, output2, output, len);
// Show the decrypted ciphertext.
printf("decrypted ciphertext: %s\n",output2);
}
#define SALT_LEN 10 //产生的salt的长度
#define BLOCK_LEN 16 //明密文长度
// Password-based encryption demo: derive a DES key from a user password
// mixed with a random salt (MD5), then encrypt/decrypt a sample message.
// Fix: scanf("%s", password) could overflow the 10-byte password buffer;
// the field width "%9s" limits input to 9 characters plus the NUL.
int main(int argc, char* argv[])
{
    unsigned char seed[] = "3adqwe1212asd";             // PRNG seed
    unsigned char salt[SALT_LEN];                       // generated salt
    unsigned char password[10];                         // user password
    unsigned char mixedresult[16];                      // MD5(password || salt)
    unsigned char key[8];                               // DES key (first 8 digest bytes)
    unsigned char iv[8+1] = "abcdfgji";                 // IV
    unsigned char input[BLOCK_LEN+1] = "12345678abcdefgh"; // plaintext
    unsigned char output[BLOCK_LEN]="", output2[BLOCK_LEN+1]="";

    create_salt (salt, SALT_LEN, seed);                 // generate the salt
    printf ("please input your password:");
    scanf ("%9s", password);                            // read password (bounded)
    MixPwdSalt (password, salt, mixedresult);           // mix salt and password
    copys (mixedresult, key, 8);                        // take 8 bytes as the DES key
    DES_deal (input, output, output2, BLOCK_LEN, key, iv);
    return 0;
}
<file_sep>/实验题目/实验1 - Hash算法MD5/Hash_MD5/MD5.CPP
#include <string.h>
#include <iostream>
#include <stdlib.h>
#include "MD5C.C"
// Compute the MD5 digest of "filename.txt" line by line and print it as
// a hex string.
// Fixes: non-standard `void main` replaced with standard `int main`; the
// file-open error path now returns a nonzero status instead of exit(0).
int main(int argc, char* argv[])
{
    FILE *fp;
    unsigned char line[1000];
    unsigned char digest[16];   /* final 128-bit hash value */

    if (NULL == (fp = fopen("filename.txt", "r")))
    {
        printf("open file error");
        return 1;   /* signal failure to the shell */
    }
    MD5_CTX context;   /* hash state */
    /* Hash with the Init/Update/Final triple. */
    MD5Init (&context);
    while (fgets((char *)line, 1000, fp))
    {
        /* Fold this line into the digest. */
        MD5Update (&context, line, strlen((char *)line));
    }
    MD5Final (digest, &context);
    /* Print the 128-bit digest as a hex string. */
    for (int i = 0; i < 16; i++)
        printf("%x", digest[i]);
    printf("\n");
    fclose( fp );
    return 0;
}
<file_sep>/实验题目/实验2 - DES-CBC/DES_CBC/DES.CPP
#include <string.h>
#include "R_STDLIB.C"
#include "DESC.C"
#include <stdio.h>
//以十六进制形式显示output中的内容
// Hex-dump the `len`-byte buffer `output`, labelled "ciphertext".
void shows (unsigned char *output, int len)
{
    printf ("ciphertext: ");
    for (int pos = 0; pos != len; ++pos)
    {
        printf ("%x", output[pos]);
    }
    printf ("\n");
}
#define LEN 16 //明文长度
// DES-CBC round-trip demo: encrypt a fixed 16-byte plaintext, show the
// ciphertext, then decrypt and show the recovered plaintext.
int main(int argc, char* argv[])
{
DES_CBC_CTX context;
// plaintext `input`, ciphertext `output`, decrypted plaintext `output2`
unsigned char input[LEN+1] = "12345678abcdefgh", output[LEN]="", output2[LEN+1]="";
// key `key`, initialization vector `iv`
unsigned char key[8+1] = "qwertyui", iv[8+1] = "abcdfgji";
// Show the plaintext.
printf("plaintext: %s\n", input);
// Encrypt.
DES_CBCInit(&context, key, iv, 1);
DES_CBCUpdate(&context, output, input, LEN);
// Show the ciphertext.
shows (output, LEN);
// Decrypt.
DES_CBCInit (&context, key, iv, 0);
DES_CBCUpdate(&context, output2, output, LEN);
// Show the decrypted ciphertext.
printf("decrypted ciphertext: %s\n",output2);
return 0;
}
<file_sep>/实验题目/实验4 - CBC-MAC/MAC/mac.cpp
//#include "stdafx.h"
#include <string.h>
#include <stdlib.h>
#include "R_STDLIB.C"
#include "MD5C.C"
#include "DESC.C"
#include "R_RANDOM.C"
#include <stdio.h>
//以十六进制形式显示output中的内容
// Print `msg` and then the `len` bytes of `output` as hex digits.
void shows (char * msg, unsigned char *output, int len)
{
    printf ("%s: ", msg);
    for (int k = 0; k < len; ++k)
        printf ("%x", output[k]);
    printf ("\n");
}
//填充随机数结构体
// Seed the RSAREF PRNG in `randomStruct` with `seed`, repeating until no
// further seed material is required.
void seed_randomStruct (unsigned char *seed, R_RANDOM_STRUCT *randomStruct)
{
unsigned int bytesNeeded = 256; // seed bytes the structure still requires
R_RandomInit (randomStruct);
while (bytesNeeded > 0)
{
R_RandomUpdate (randomStruct, seed,
strlen((char *)seed));
R_GetRandomBytesNeeded (&bytesNeeded,
randomStruct);
}
}
//将input和mac中的内容复制到plaintext中
// Concatenate the message `input` (inputlen bytes) and the 8-byte `mac`
// into `plaintext` (message first, MAC appended).
// Uses memcpy (string.h is already included) instead of the two manual
// copy loops.
void combines (unsigned char *plaintext, unsigned char *input, unsigned char mac[8], int inputlen)
{
    memcpy (plaintext, input, (size_t) inputlen);
    memcpy (plaintext + inputlen, mac, 8);
}
//将恢复后明文中的内容拆分到output和mac
// Split recovered plaintext into the message (`output`, NUL-terminated
// at outputlen) and its trailing 8-byte `mac`.
// Uses memcpy (string.h is already included) instead of the manual loops.
void decombines (unsigned char *plaintext, unsigned char *output, unsigned char mac[8], int outputlen)
{
    memcpy (output, plaintext, (size_t) outputlen);
    output[outputlen] = '\0';
    memcpy (mac, plaintext + outputlen, 8);
}
//产生MAC
// Compute a CBC-MAC: DES-CBC-encrypt `input` (len bytes) under `key`/`iv`
// and keep the final 8-byte cipher block as the MAC in `mac_output`.
// NOTE(review): assumes len >= 8 and a multiple of the DES block size;
// len < 8 would index before the buffer — confirm against callers.
void create_MAC(unsigned char mac_output[8], unsigned char *input,
int len, unsigned char key[8], unsigned char iv[8])
{
unsigned char *output;
DES_CBC_CTX context;
output = new unsigned char[len];
DES_CBCInit(&context, key, iv, 1);
DES_CBCUpdate(&context, output, input, len);
// The last cipher block is the MAC.
for (int i=0; i<8;i++)
mac_output[i] = output[len-8+i];
delete []output;
}
//比较收到的MAC和对收到消息产生的MAC
// Compare two 8-byte MACs.
// Returns 1 when they are identical, 0 otherwise.
// Uses memcmp (string.h is already included) instead of the manual loop.
int compares(unsigned char received_mac[8],
unsigned char new_mac[8])
{
    return memcmp(received_mac, new_mac, 8) == 0 ? 1 : 0;
}
#define TEXT_LEN 16 // 消息长度
#define PLAINTEXT_LEN TEXT_LEN + 8 // 明密文长度(不包括字符串末尾的'\0')
// MAC-then-encrypt demo: compute a CBC-MAC over a message, append it,
// encrypt message||MAC with a second key, then decrypt, split, recompute
// the MAC and compare to detect tampering.
int main(int argc, char* argv[])
{
unsigned char seed[] = "12312ae12qweqweqweqe";
unsigned char key1[8], key2[8], iv1[8]={0}, iv2[8];
unsigned char input[TEXT_LEN+1] = "12345678abcdefgh"; // message to send
unsigned char mac[8]; // MAC computed over the sent message
unsigned char plaintext[PLAINTEXT_LEN]; // plaintext (message || MAC)
unsigned char ciphertext[PLAINTEXT_LEN]; // ciphertext
unsigned char plaintext2[PLAINTEXT_LEN]; // recovered plaintext
unsigned char output[TEXT_LEN+1]; // recovered message
unsigned char received_mac[8]; // recovered MAC
unsigned char new_mac [8]; // MAC recomputed over the received message
R_RANDOM_STRUCT randomStruct;
DES_CBC_CTX context;
seed_randomStruct (seed, &randomStruct);
// Generate keys and IVs: key1/iv1 for the MAC, key2/iv2 for encryption.
R_GenerateBytes (key1, 8, &randomStruct);
R_GenerateBytes (key2, 8, &randomStruct);
R_GenerateBytes (iv2, 8, &randomStruct);
// Show the message being sent.
printf ("sent message: %s\n", input);
// Compute the MAC over the outgoing message.
create_MAC(mac, input, TEXT_LEN, key1, iv1);
// Show the MAC.
shows("sent MAC", mac, 8);
// Concatenate message and MAC into the plaintext.
combines (plaintext, input, mac, TEXT_LEN);
// Encrypt.
DES_CBCInit(&context, key2, iv2, 1);
DES_CBCUpdate(&context, ciphertext, plaintext,
PLAINTEXT_LEN);
// Show the ciphertext.
shows("ciphertext", ciphertext, PLAINTEXT_LEN);
//ciphertext[10] = ciphertext[10] + 1; // tamper test: flip one ciphertext byte
// Decrypt.
DES_CBCInit(&context, key2, iv2, 0);
DES_CBCUpdate(&context, plaintext2, ciphertext,
PLAINTEXT_LEN);
// Split the recovered plaintext into output and received_mac.
decombines (plaintext2, output, received_mac, TEXT_LEN);
// Show the decrypted message and MAC.
printf("reveived message: %s\n", output);
shows("received MAC", received_mac, 8);
// Recompute the MAC over the received message and show it.
create_MAC(new_mac, output, TEXT_LEN, key1, iv1);
shows("MAC for received message", new_mac, 8);
// Verify the MAC.
if (compares(received_mac, new_mac))
printf ("received message OK!\n");
else
printf ("received message ERROR!\n");
R_RandomFinal (&randomStruct);
return 0;
}
|
f01548b2864e1aac7759de5a1b6d5e0ff0039444
|
[
"Markdown",
"C",
"C++"
] | 10 |
C++
|
LilWingXYZ/Cryptography-Experiment
|
e5abc7ec69251dc1e8e09989cf704d4a27c6130c
|
1d0cfb04a9c45fffd51cba115773b7b322a38fa7
|
refs/heads/main
|
<file_sep>import React, { Component } from "react";
import ReactApexChart from "react-apexcharts";
// const formCard = () => {
// ApexCharts area chart of DAI price vs. supply, rendered via
// react-apexcharts. All chart configuration lives in component state.
class chart extends Component {
constructor(props) {
super(props);
// `series` holds the plotted data; `options` is the full ApexCharts
// configuration object (styling, axes, tooltip, annotations).
this.state = {
series: [
{
name: "DAI",
// data: series.monthDataSeries1.prices
data: [30, 40, 45, 50, 49, 60, 70, 91],
},
],
options: {
colors: ["#fff"],
grid: {
show: true,
borderColor: "#90A4AE",
strokeDashArray: 0,
position: "back",
xaxis: {
lines: {
show: false,
},
},
yaxis: {
lines: {
show: false,
},
},
row: {
colors: undefined,
opacity: 0.5,
},
column: {
colors: undefined,
opacity: 0.5,
},
padding: {
top: 0,
right: 0,
bottom: 0,
left: 0,
},
},
chart: {
foreColor: "white",
type: "area",
height: 350,
zoom: {
enabled: false,
},
},
fill: {
colors: "#FDFEFF",
opacity: 0.1,
type: "solid",
},
dataLabels: {
enabled: false,
},
stroke: {
curve: "smooth",
width: 1,
},
title: {
text: "",
align: "left",
},
subtitle: {
text: "",
align: "left",
},
// labels: series.monthDataSeries1.dates,
// X axis: "supply" categories.
xaxis: {
categories: [1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998],
// type: 'datetime',
title: {
text: "supply",
rotate: 0,
offsetX: 0,
offsetY: 0,
style: {
color: undefined,
fontSize: "12px",
fontFamily: "Montserrat",
fontWeight: 700,
cssClass: "apexcharts-yaxis-title",
},
},
},
// Y axis: "price".
yaxis: {
// opposite: true
title: {
text: "price",
rotate: 0,
offsetX: 0,
offsetY: 0,
style: {
color: undefined,
fontSize: "12px",
fontFamily: "Montserrat",
fontWeight: 700,
cssClass: "apexcharts-yaxis-title",
},
},
},
legend: {
horizontalAlign: "right",
},
tooltip: {
enabled: true,
enabledOnSeries: undefined,
shared: true,
followCursor: false,
intersect: false,
inverseOrder: false,
custom: undefined,
fillSeriesColor: false,
// theme: false,
style: {
fontSize: "12px",
fontFamily: undefined,
// color: "red",
},
},
// Horizontal "current price" marker and a vertical marker at x=1996.
annotations: {
yaxis: [
{
y: 60,
borderColor: "#fff",
label: {
borderColor: "transparent",
style: {
// color: "#FDFEFF",
background: "transparent",
// foreColor: "##FDFEFF",
},
text: "Current price: 1.6425 DAI",
},
},
],
xaxis: [
{
// x: new Date('23 Nov 2017').getTime(),
x: 1996,
borderColor: "#fff",
label: {
style: {
color: "#fff",
},
// text: 'X-axis annotation - 22 Nov'
},
},
],
},
},
};
}
render() {
// Delegate rendering to ReactApexChart with the state-held config.
return (
<div id="chart">
<ReactApexChart
options={this.state.options}
series={this.state.series}
type="area"
height={350}
/>
</div>
);
}
}
export default chart;
<file_sep>import React from 'react';
import { Button } from "react-bootstrap";
import { Link } from "react-router-dom";
import NavBar from "../../components/NavBar/Navbar";
import Footer from "../../components/Footer";
import "./comingSoon.css";
// var perf = require("./template.html");
const pageNotFound = () => {
return (
// <iframe src={perf}></iframe>
<div className="">
<NavBar></NavBar>
<div
className="container box-center text-center mt-5 mb-5"
style={{ height: "100%", height: "50vh" }}
>
{/* <h1 className="soon_h1">Governance</h1> */}
<div style={{ width: "100%" }}>
<h1 className="soon_h1">Coming Soon</h1>
</div>
<div style={{ width: "100%" }}>
<p className="soon_p"> Page currently under Development.</p>
</div>
<div style={{ width: "100%" }}>
<Button className="hb-btn-custom" variant="primary">
<Link className="soon-btn" to="/home">
Way Back "Home"
</Link>
</Button>
</div>
</div>
<Footer></Footer>
</div>
);
};
export default pageNotFound;<file_sep>// Imp React Imports
import React, { Component } from 'react';
import {
BrowserRouter as Router,
Route,
Switch,
Link,
Redirect
} from 'react-router-dom';
// CSS imports
import './App.css';
import "bootstrap/dist/css/bootstrap.min.css";
// Pages Imports
import Home from "../src/page/index.jsx";
import page404 from '../src/page/404/404';
import pool2 from "../src/page/pool1/pool1";
import ldrpool from "../src/page/ldr_pool/ldr_pool";
import Pool1 from "../src/page/pool2/pool2.js";
import migration from "../src/page/migration/migration.js";
import auction from "../src/page/auction/auction";
import createBadp from "../src/page/createBadp/createBadp";
import display from "../src/page/display/display";
import Soon from "../src/page/comingSoon/comingSoon";
// Actual Page render
class App extends Component {
render() {
return (
<Router>
<Switch>
<Route exact path="/" component={Home} />
<Route path="/home" component={Home} />
<Route path="/pool1" component={Pool1} />
<Route path="/ldrpool" component={ldrpool} />
<Route path="/pool2" component={pool2} />
<Route path="/migration" component={migration} />
<Route path="/auction" component={auction} />
<Route path="/createBadp" component={createBadp} />
<Route path="/display" component={display} />
<Route path="/soon" component={Soon} />
<Route path="/404" component={page404}></Route>
<Redirect to="/404" />
</Switch>
</Router>
);
}
}
export default App;
<file_sep>import React from "react";
// import ReactDOM from "react-dom";
import Carousel from "react-elastic-carousel";
import Item from "./Item";
import "./carasolstyles.css";
// import BorderChanger from './customScript';
const breakPoints = [
{ width: 1, itemsToShow: 1 },
{ width: 550, itemsToShow: 2, itemsToScroll: 2 },
{ width: 768, itemsToShow: 3 },
// { width: 1200, itemsToShow: 4 },
];
function carasol() {
return (
<div className="App">
<Carousel breakPoints={breakPoints}>
<Item class="">
<div class="cara_module item_border1 item1">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 0</h2>
</div>
</Item>
<Item>
{" "}
<div class="cara_module item2 item_border2">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 1</h2>
</div>
</Item>
<Item>
{" "}
<div class="cara_module item3 item_border3">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 2</h2>
</div>
</Item>
<Item>
{" "}
<div class="cara_module item4 item_border4">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 3</h2>
</div>
</Item>
<Item>
{" "}
<div class="cara_module item5 item_border5">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 4</h2>
</div>
</Item>
<Item>
{" "}
<div class="cara_module item6 item_border6">
<ul>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
<li>Additional Texton main text ipsum dolor sit amet.</li>
</ul>
<h2>Phase 5</h2>
</div>
</Item>
</Carousel>
</div>
);
}
export default carasol;<file_sep>
import React from "react";
import { Button, Row,Col } from "react-bootstrap";
import {Link} from "react-router-dom";
import './card.css';
import Slider from '../slider/slider';
import CustomInput from '../input/input'
import DropZone from '../dropzone/DropZone';
function auctionCard() {
return (
<div className="auction-card-container">
<div className="auction-card-wrapper">
<div className="card_module">
<Row>
<Col>
<Row className="justify-content-center">
<Slider className="" />
</Row>
<hr />
<Row>
<CustomInput />
</Row>
</Col>
<Col>
<div className="content">
<h1 className="slider_p">ATTACHMENT</h1>
<DropZone />
</div>
</Col>
</Row>
<Row>
<Col className="d-flex justify-content-center">
<Button className="hb-btn-custom" variant="primary">
{/* <Link className="four04-btn" to="/home"> */}
Submit
{/* </Link> */}
</Button>
</Col>
</Row>
</div>
</div>
</div>
);
};
export default auctionCard;
<file_sep>import React from "react";
import { Form,FormControl,InputGroup, Button } from "react-bootstrap";
import "./formCard.css";
import logo1 from "../../../Images/assets/LDR.png";
import logo2 from "../../../Images/assets/Dai_Logo1.png";
const formCard = () => {
return (
<div className=" formCard-card-container">
<div className="formCard-card-wrapper">
<div className="formCard_card_module">
<div className="formCard_custom_col">
<img className="img_css" src={logo2} alt="DAI"></img>
<h2 className="formCard_custom_h2">0.00 DAI</h2>
</div>
<div className="formCard_custom_col">
<img className="img_css" src={logo1} alt="LDR"></img>
<h2 className="formCard_custom_h2">0.00 LSD</h2>
</div>
<div className="formCard_custom_col1">
<p className="formCard_custom_p">Unlocked DAI :</p>
<p className="formCard_custom_p1">0.00 LSD</p>
</div>
<hr className="" />
<div className="formCard_custom_col mt-4">
<p className="formCard_custom_p">Price</p>
<p className="formCard_custom_p1">0</p>
</div>
<hr className="" />
<div className="formCard_custom_col mt-3">
<p className="formCard_custom_p">Recieve Amount</p>
<p className="formCard_custom_p1">0 LSD</p>
</div>
<hr className="" />
<div className="formCard_custom_col mt-3">
<p className="formCard_custom_p">Slippage</p>
<p className="formCard_custom_p1">0</p>
</div>
<hr className="" />
<div className="">
<InputGroup className="mt-3 mb-3 formCard_input_div">
<FormControl
placeholder="0"
aria-label="DAI"
aria-describedby="basic-addon1"
className="formControl_badp"
/>
<Button className="btn_max">MAX</Button>
<h2 className="formControl_label">DAI</h2>
</InputGroup>
</div>
<div className="" style={{ width: "100%" }}>
<Button className="formCard_btn">BUY 0 LSD for 0 DAI</Button>
</div>
<div style={{ width: "100%" }}>
<Form.Group controlId="formBasicCheckbox">
<Form.Check
type="checkbox"
label="I accept the terms and conditions"
className="formBasicCheckbox"
/>
</Form.Group>
</div>
</div>
</div>
</div>
);
};
export default formCard;
<file_sep>import React from "react";
import './input.css';
function customInput() {
return (
<div class="page">
{/* <label class="field field_v1">
<input class="field__input" placeholder="e.g. Name Here" />
<span class="field__label-wrap">
<span class="field__label">URL :</span>
</span>
</label> */}
<label class="field field_v2">
<input class="field__input" placeholder="e.g. Enter URL Here" />
<span class="field__label-wrap">
<span class="field__label">URL :</span>
</span>
</label>
<label class="field field_v2">
<input class="field__input" placeholder="e.g. Enter Alter Here" />
<span class="field__label-wrap">
<span class="field__label">Alter :</span>
</span>
</label>
<label class="field field_v2">
<input class="field__input" placeholder="e.g. Industry Name Here" />
<span class="field__label-wrap">
<span class="field__label">Industry :</span>
</span>
</label>
{/* <label class="field field_v3">
<input class="field__input" placeholder="e.g. <EMAIL>" />
<span class="field__label-wrap">
<span class="field__label">E-mail</span>
</span>
</label> */}
</div>
);
};
export default customInput;<file_sep>// import { render } from '@testing-library/react';
import React from 'react';
import PropTypes from "prop-types";
import { withStyles, makeStyles } from "@material-ui/core/styles";
import Slider from "@material-ui/core/Slider";
// import Typography from "@material-ui/core/Typography";
import Tooltip from "@material-ui/core/Tooltip";
import { Row, Col } from "react-bootstrap";
import Input from "@material-ui/core/Input";
import "./slider.css";
const useStyles = makeStyles((theme) => ({
root: {
width: 300 + theme.spacing(3) * 2,
},
margin: {
height: theme.spacing(3),
},
}));
function ValueLabelComponent(props) {
const { children, open, value } = props;
return (
<Tooltip open={open} enterTouchDelay={0} placement="top" title={value}>
{children}
</Tooltip>
);
}
ValueLabelComponent.propTypes = {
children: PropTypes.element.isRequired,
open: PropTypes.bool.isRequired,
value: PropTypes.number.isRequired,
};
const PrettoSlider = withStyles({
root: {
height: 8,
color: "#4029EF",
},
thumb: {
height: 24,
width: 28,
backgroundColor: "#4029EF",
border: "2px solid white",
marginTop: -8,
marginLeft: -12,
"&:focus, &:hover, &$active": {
boxShadow: "inherit",
},
borderRadius: "10px",
},
active: {},
valueLabel: {
left: "calc(-18% )",
},
track: {
height: 8,
borderRadius: 4,
// color: 'blue',
background:
"linear-gradient(90deg, #3C26EE 2.7%, #9A4EBD 50.45%, #CE4A67 99.21%)",
},
rail: {
height: 8,
borderRadius: 4,
background: "#53526A",
},
})(Slider);
export default function CustomizedSlider() {
const classes = useStyles();
const [value, setValue] = React.useState(30);
const handleSliderChange = (event, newValue) => {
setValue(newValue);
};
const handleInputChange = (event) => {
setValue(event.target.value === "" ? "" : Number(event.target.value));
};
return (
<div className={classes.root}>
<Row className="slider-row">
<Col sm={2} className="">
<p className="slider_p">Pixel</p>
</Col>
<Col className="">
{/* <Typography gutterBottom>pretto.fr</Typography> */}
<PrettoSlider
onChange={handleSliderChange}
value={typeof value === "number" ? value : 0}
valueLabelDisplay="auto"
aria-label="pretto slider"
defaultValue={20}
/>
</Col>
<Col sm={2} className="">
<Input
className={classes.input}
value={value}
margin="dense"
onChange={handleInputChange}
// onBlur={handleBlur}
inputProps={{
step: 1,
min: 0,
max: 100,
type: "number",
"aria-labelledby": "input-slider",
}}
style={{ color: "white", width: "35px" }}
/>
</Col >
<Col sm={2} className="slider-col">
<p className="slider_p">K Pxl</p>
</Col>
</Row>
</div>
);
}
<file_sep>import React from "react";
import {Table} from "react-bootstrap";
// import ReactApexChart from "react-apexcharts";
import Chart from "./chart";
import "./graph.css";
const formCard = () => {
return (
<div className=" graph-card-container">
<div className="graph-card-wrapper">
<div className="graph_card_module">
<div className="graph_table_div" style={{ width: "100%" }}>
<Table className="graph_table_css ">
<tr>
<td>Buy</td>
<td>Reserve</td>
<td>Curve Insurance</td>
<td>Total Supply</td>
</tr>
{/* </Table> */}
{/* <hr className="graph_table_divider mb-3"></hr> */}
{/* <Table className="graph_table_css table-borderless"> */}
<tr>
<th>1.742 DAI</th>
<th>40,577,123 DAI</th>
<th>40,577,123 LDR</th>
<th>40,577,123 LDR</th>
</tr>
</Table>
</div>
<div style={{width:"100%"}}>
<Chart></Chart>
</div>
</div>
</div>
</div>
);
};
export default formCard;
|
a2c188784e3f5f6960a5cf44fc7e6c3b930b5dc0
|
[
"JavaScript"
] | 9 |
JavaScript
|
NasushapAC/Liberty-project
|
19eb5aea8a07a0977b3986680ef3c1940861c81e
|
f99a90986a47a1451e0d1bad45b44c2f56b61645
|
refs/heads/master
|
<repo_name>luis572/Proyecto1AREP<file_sep>/src/main/java/edu/escuelaing/arem/sockets/Socketcliente.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.escuelaing.arem.sockets;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
/**
* Declaracion de la clase ClientSckt,
* socket, se comunica con el servidor,
* le envia peticiones
* recibe respuestas
* @author Luis
*/
public class Socketcliente {
public static Socket servidor(ServerSocket serverSocket) {
Socket clientSocket= null;
try {
System.out.println("Listo para recibir ...");
clientSocket = serverSocket.accept();
} catch (IOException e) {
System.err.println("Accept failed.");
System.exit(1);
}
return clientSocket;
}
}
<file_sep>/src/main/java/edu/escuelaing/arem/framework/pojo.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.escuelaing.arem.framework;
import edu.escuelaing.arem.*;
/**
*
* @author 2137497
*/
public class pojo {
@web("/cuadrado")
public static String cuadraro(String n){
int respuesta=Integer.parseInt(n);
respuesta=respuesta*respuesta;
return Integer.toString(respuesta);
}
@html("/cuadra")
public static String prueba(String n){
int respuesta=Integer.parseInt(n);
respuesta=respuesta*respuesta;
return Integer.toString(respuesta);
}
@web("/suma")
public static String suma(String n,String n2){
int respuesta=Integer.parseInt(n);
int respuesta2=Integer.parseInt(n2);
return Integer.toString(respuesta+respuesta2);
}
@web("/sumacuadrados")
public static String sumacuadrados(String n,String n2){
int respuesta=Integer.parseInt(n);
int respuesta2=Integer.parseInt(n2);
return Integer.toString(respuesta*respuesta+respuesta2*respuesta2);
}
}
<file_sep>/README.md
# Proyecto HttpServer y Framework Loc.
Servidor dedicado a recibir solicitudes de un cliente y mostrar paginas web html e imagenes png, Igualmente el servidor provee un framework IoC para la construcción de aplicaciones web a partir de POJOS.
##### Made by:
<NAME>
<file_sep>/src/main/java/edu/escuelaing/arem/httpServer/HttpServer.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.escuelaing.arem.httpServer;
import edu.escuelaing.arem.paginas.pagina;
import edu.escuelaing.arem.sockets.SocketServidor;
import edu.escuelaing.arem.sockets.Socketcliente;
import java.net.*;
import java.io.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Declaracion de la clase HttpServer,
* main principal del proyecto, desde donde corre el servidor
* @author <NAME>
*/
public class HttpServer {
private static ServerSocket serverSocket;
private static Socket clientSocket;
public static HashMap<String, Method> metod;
/**
* Creacion del main, que ejecutara todo el proyecto
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
clientSocket=null;
serverSocket=SocketServidor.servidor();
while(true==true) {
reconocerPojos();
clientSocket = Socketcliente.servidor(serverSocket);
BufferedReader in = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
String inputLine="";
String[] get = null;
while ((inputLine = in.readLine()) != null) {
System.out.println("Received: " + inputLine);
if (!in.ready()) {
break;
}
if (inputLine.contains("GET")) {
get = inputLine.split(" ");
System.out.println("Adress to show: "+ get[1]);
}
}
ArrayList<String> parametrosMetodo=new ArrayList<>();
try{
if(get[1].contains(":")){
String[] variables=get[1].split(":");
for(int i=1;i<variables.length;i++){
parametrosMetodo.add(variables[i]);
}
get[1]=get[1].split(":")[0];
}
pagina.tipoArchivo(get[1],clientSocket,metod,parametrosMetodo);
}catch(Exception e){
pagina.tipoArchivo("/index.html",clientSocket,metod,parametrosMetodo);
}
in.close();
clientSocket.close();
}
}
public static String getPageRequest(InputStream is) throws IOException {
is.mark(0);
BufferedReader in = new BufferedReader(new InputStreamReader(is));
String inputLine = null;
while ((inputLine = in.readLine()) != null) {
if (!in.ready())
break;
if (inputLine.contains("GET")) {
String[] get = inputLine.split(" ");
return get[1];
}
break;
}
return "path";
}
/**
* esta clase almacenara todos los pojos con anotacion @web de un directorio dentro de un hashmap la lleve sera la extencion de la
* anotacion y el valor el metodo asociado a dicha anotacioa.
* @param File se debera de enviar la carpeta donde estara todos los pojos
* @param String para mayor facilidad se debe de conocer la ruta relativa de la carpeta en donde se van a extraer los metodos con anotacion web
* @throws ClassNotFoundException
*/
public static void listarFicherosPorCarpeta(final File carpeta,String ruta) throws ClassNotFoundException {
metod=new HashMap<String ,Method >();
for (final File ficheroEntrada : carpeta.listFiles()) {
String copiaRuta=ruta;
String paquete="";
copiaRuta=copiaRuta.replace("/",".");
String namefichero=ficheroEntrada.getName().substring(0,ficheroEntrada.getName().length()-5);
paquete=copiaRuta.substring(14,copiaRuta.length());
Class c=Class.forName(paquete+namefichero);
Method[] metodos=c.getDeclaredMethods();
for(int i=0;i<metodos.length;i++){
Annotation[] tipo=metodos[i].getDeclaredAnnotations();
if(tipo.length>0){
String an=tipo[0].toString().substring(31,tipo[0].toString().length());
String llave=an.substring(10,an.length()-1);
if(an.contains("web") && !metod.containsKey(llave)){
metod.put(llave,metodos[i]);
}
}
}
}
}
/**
* entra al paquete framework y examinara todas las clases que esten en dicho directorios para llamar
* a listar ficheros por carpeta que evaluara archivo por archivo.
*/
public static void reconocerPojos(){
File carpeta = new File("src/main/java/edu/escuelaing/arem/framework/");
try {
listarFicherosPorCarpeta(carpeta,"src/main/java/edu/escuelaing/arem/framework/");
} catch (ClassNotFoundException ex) {
Logger.getLogger(HttpServer.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
|
ebfce4211b891495606ef4ff78bb3b5311deb14a
|
[
"Markdown",
"Java"
] | 4 |
Java
|
luis572/Proyecto1AREP
|
cccc2103921937f2c0534adf5abc7603c9bd2f9d
|
f95dba3e775ed6c4979c3d7723ac82822012b348
|
refs/heads/main
|
<file_sep>#include <stdio.h>
#include <math.h>
int main(){
int n;
printf("\nNhap n = ");
scanf("%d", &n);
if(n < 2){
printf("\n%d khong phai so nguyen to", n);
return 0;
}
int count = 0;
for(int i = 2; i <= sqrt(n); i++){
if(n % i == 0){
count++;
}
}
if(count == 0){
printf("\n%d la so nguyen to", n);
}else{
printf("\n%d khong phai so nguyen to", n);
}
}
<file_sep>#include<stdio.h>
#include<conio.h>
#include<math.h>
#include<string.h>
int main()
{
int cows,bulls,i,s,c,d;
char guess[4],word[4];
clrscr();
printf("Player 1,enter the letters of your word.");
for(i=1;i<=4;i++)
{
scanf("%c",&word[i]);
}
clrscr();
printf("Player2, please get ready");
for(i=1;i<=10;i++)
{
bulls=0;cows=0;
printf("\nEnter Guess %d:",i);
for(c=1;c<=4;c++)
{
scanf("%c",&guess[c]);
}
for(d=1;d<=4;d++)
{
if(guess[d]==word[d]) bulls++;
else {
for(s=1;s<=4;s++)
{
if(guess[d]==word[s]) cows++;
}
}
}
if(bulls==4) printf("You are Victorious");
printf("Bulls = %d, Cows = %d",bulls,cows);
}
getch();
}
<file_sep>strNew = input("Moi ban nhap")
strNew = strNew.lower()
nLen = len(strNew)
nCount = 0
nNum = int(len(strNew)/2)
for i in range (0,nNum):
if (strNew[i] == strNew[nLen - i -1]):
nCount +=1
if (nCount == nNum):
print("Palindrome")
else:
print("No palindrome")
<file_sep>#include<stdio.h>
const char CHAR_55 = 55;
const char CHAR_48 = 48;
int convertNumber(int n, int b) {
if (n < 0 || b <= 2 || b <= 32 ) {
printf("He co so hoac gia tri chuyen doi khong hop le!");
return 0;
}
int i;
char arr[20];
int count = 0;
int m;
int remainder = n;
while (remainder > 0) {
if (b > 10) {
m = remainder % b;
if (m >= 10) {
arr[count] = (char) (m + CHAR_55);
count++;
} else {
arr[count] = (char) (m + CHAR_48);
count++;
}
} else {
arr[count] = (char) ((remainder % b) + CHAR_48);
count++;
}
remainder = remainder / b;
}
// hien thi he co so
for (i = count - 1; i >= 0; i--) {
printf("%c", arr[i]);
}
return 1;
}
int main() {
int n = 12;
printf("So %d trong he co so 2 = ", n);
convertNumber(n, 2);
printf("\nSo %d trong he co so 16 = ", n);
convertNumber(n, 16);
return 1;
}
<file_sep>#include<stdio.h>
long tinhGiaithua(int n) {
if (n > 0) {
return n * tinhGiaithua(n - 1);
} else {
return 1;
}
}
int main() {
int a = 5;
int b = 0;
int c = 10;
printf("Giai thua cua %d la: %d \n", a, tinhGiaithua(a));
printf("Giai thua cua %d la: %d \n", b, tinhGiaithua(b));
printf("Giai thua cua %d la: %d", c, tinhGiaithua(c));
}
<file_sep>#include<stdio.h>
int main()
{ float n;
printf("moi ban nhap");
scanf("%f",&n);
if (n==int(n))
{
if (int(n) % 2==0)
{
printf("day la so chan");
}
else
{
printf("day la so le le");
}
}
else
{
printf("khong hop le");
}
<file_sep>#include<iostream>
using namespace std;
int fibonaci(int s)
{
if(s==0 || s==1)
return 1;
else
return (fibonaci(s-2)+fibonaci(s-1));
}
void main()
{
int n;
cout << "Nhap n: ";
cin >> n;
cout << fibonaci(n) << " ";
cout<<endl;
}
|
0e1cb56ab1e7b4ff22bc75f994ec1a7e7f2cf8b4
|
[
"C++"
] | 7 |
C++
|
vnhng100120/VanHung
|
89e210481e0da8f8edf3afd46ea5269d76e177c5
|
f4b2ecff36b481a05b18add83b0daacb0b575713
|
refs/heads/master
|
<file_sep>//
// ViewController.swift
// Prime Planner
//
// Created by <NAME> on 9/6/18.
// Copyright © 2018 Poods. All rights reserved.
//
import UIKit
class ViewController: UIViewController {
// create and declare our tableview and dateLabel objects
private let tableView = UITableView(frame: .zero, style: .grouped)
private let dateLabel = UILabel()
private var directionIsAscending = true
private var descriptor = ""
// add UI button
private let taskAddButton = UIButton(type: .system)
private let sortButton = UIButton(type: .system)
private let descriptorButton = UIButton(type: .system)
private let descriptors = ["name", "dueDate", "isChecked", "priority"]
private let descriptorsText = ["Name", "Date", "Checked", "Priority"]
private var descNum = 0
// create our static data
// this will change into a 'var' once we have task adding/removing set up,
// because the data will need to change when users add/remove tasks
private var data = [Task]()
// this function is a notification that is called when the view
// of this controller is loaded into the controller. This function
// is only called one time after initialization of the controller.
override func viewDidLoad() {
// hide our navigation bar in the home screen
navigationController?.setNavigationBarHidden(true, animated: false)
// because this function is an override, make sure to call
// the super function first, to ensure that the parent does
// what it needs to do when the view loads.
super.viewDidLoad()
// now that the view is loaded, lets layout our tableview
layoutTableView()
// layout our date view after the tableview is set up
// because we will place the date in the header of the tableview,
// so that it can scroll along with the table
layoutDateLabel()
// layout for task button
layoutTaskAddButton()
// layout for sort buttons
layoutSortingButtons()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// if our navigation bar isn't hidden, hide it
if navigationController?.isNavigationBarHidden == false {
navigationController?.setNavigationBarHidden(true, animated: animated)
}
// when this view appears, check for a date change
// if date has changed, update our date label
if dateLabel.text != Date.todayString {
dateLabel.text = Date.todayString
dateLabel.sizeToFit()
}
// reload the data, updating tasks if needed
reloadData()
}
func reloadData() {
// fetch the tasks from the core database
data = jcore.tasks.sort(descriptors[descNum], ascending: directionIsAscending).fetch()
// reload the tableView
tableView.reloadData()
}
func layoutTableView() {
// tell the tableview that we want to setup our own constraints
tableView.translatesAutoresizingMaskIntoConstraints = false
// set the height for each row
tableView.rowHeight = 70
// tableview will source its data from self (UIViewController) and notify self of any actions
tableView.dataSource = self
tableView.delegate = self
// register TaskCell with the tableView
tableView.register(TaskCell.self, forCellReuseIdentifier: "TaskCell")
// add the tableview to the main view of this view controller (self)
view.addSubview(tableView)
// constraint dimensions to match the view (fills the entire view)
NSLayoutConstraint.activate([
tableView.widthAnchor.constraint(equalTo: view.widthAnchor),
tableView.heightAnchor.constraint(equalTo: view.heightAnchor),
tableView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
tableView.centerYAnchor.constraint(equalTo: view.centerYAnchor)
])
}
func layoutDateLabel() {
// create a container to hold our date label
// we create it with a hard-coded frame, because it's going into
// the header of the tableView, which should have a predefined height
let dateContainerView = UIView(frame: CGRect(x: 0, y: 0, width: 1, height: 100))
// setup our date label
dateLabel.translatesAutoresizingMaskIntoConstraints = false
dateLabel.text = Date.todayString
dateLabel.textColor = .red
// size the label view to fit the text inside
dateLabel.sizeToFit()
// add our date label to the container view
dateContainerView.addSubview(dateLabel)
// set our container view as the tableview header
tableView.tableHeaderView = dateContainerView
// constrain the dateLabel center (size is set above) to the container
NSLayoutConstraint.activate([
dateLabel.leadingAnchor.constraint(equalTo: dateContainerView.leadingAnchor, constant: 25),
dateLabel.centerYAnchor.constraint(equalTo: dateContainerView.centerYAnchor),
])
}
func layoutTaskAddButton() {
// container for task button
// when using a container that goes into the header or footer of a tableview
// set the x and y to 0 and the width can be any number > 0, because the
// tableview will resize it to match the width of itself.
// the only thing we have to customize here would be the height, as
// the tableview will not resize that for us.
let taskButContainerView = UIView(frame: CGRect(x: 0, y: 0, width: 1, height: 100))
// when using constraints, always remember to disable this option
// this essential tells the program that we are going
// to set our constraints manually
taskAddButton.translatesAutoresizingMaskIntoConstraints = false
// format and constraints for taskbutton
taskAddButton.setTitle("+", for: .normal)
taskAddButton.titleLabel?.font = UIFont(name: "GeezaPro", size: 30)
taskAddButton.tintColor = .black
taskAddButton.backgroundColor = .white
taskAddButton.setRadius(10)
taskAddButton.sizeToFit()
taskAddButton.addTarget(self, action: #selector(taskAddButtonPressed), for: .touchUpInside)
taskButContainerView.addSubview(taskAddButton)
tableView.tableFooterView = taskButContainerView
// set container size to be below the container
NSLayoutConstraint.activate([
taskAddButton.centerXAnchor.constraint(equalTo: taskButContainerView.centerXAnchor),
taskAddButton.centerYAnchor.constraint(equalTo: taskButContainerView.centerYAnchor),
taskAddButton.widthAnchor.constraint(equalTo: taskAddButton.heightAnchor)
])
}
@objc func taskAddButtonPressed() {
// create our detail controller
let taskDetailViewController = TaskEditViewController()
// create new nav controller for a modal presentation
let navigation = UINavigationController(rootViewController: taskDetailViewController)
// push vc onto the nav stack
present(navigation, animated: true, completion: nil)
}
func layoutSortingButtons() {
let buttonWidth: CGFloat = 60
let buttonHeight: CGFloat = 30
let buttonPadding: CGFloat = 10
// container for sort button
// when using a container that goes into the header or footer of a tableview
// set the x and y to 0 and the width can be any number > 0, because the
// tableview will resize it to match the width of itself.
// the only thing we have to customize here would be the height, as
// the tableview will not resize that for us.
let sortButContainerView = UIView(frame: CGRect(x: 0, y: 0, width: (buttonWidth * 2) + buttonPadding, height: buttonHeight))
sortButContainerView.translatesAutoresizingMaskIntoConstraints = false
// format and constraints for Descriptorbutton
descriptorButton.frame = CGRect(x: 0, y: 0, width: buttonWidth, height: buttonHeight)
descriptorButton.setTitle(descriptorsText[descNum], for: .normal)
descriptorButton.titleLabel?.adjustsFontSizeToFitWidth = true;
descriptorButton.tintColor = .black
descriptorButton.backgroundColor = .white
descriptorButton.setRadius(5)
descriptorButton.addTarget(self, action: #selector(descriptorButtonPressed), for: .touchUpInside)
// adding the descriptor button to the header view
sortButContainerView.addSubview(descriptorButton)
// format and constraints for sortButton
sortButton.frame = CGRect(x: buttonWidth + buttonPadding, y: 0, width: buttonWidth, height: buttonHeight)
sortButton.setTitle("↓", for: .normal)
sortButton.tintColor = .black
sortButton.backgroundColor = .white
sortButton.setRadius(5)
sortButton.addTarget(self, action: #selector(sortButtonPressed), for: .touchUpInside)
// adding a subview so that the button is viewable
sortButContainerView.addSubview(sortButton)
// Adding the previously made subview to the header
tableView.tableHeaderView!.addSubview(sortButContainerView)
NSLayoutConstraint.activate([
sortButContainerView.widthAnchor.constraint(equalToConstant: buttonWidth * 2 + buttonPadding),
sortButContainerView.heightAnchor.constraint(equalToConstant: buttonHeight),
sortButContainerView.centerYAnchor.constraint(equalTo: tableView.tableHeaderView!.centerYAnchor),
sortButContainerView.trailingAnchor.constraint(equalTo: tableView.tableHeaderView!.trailingAnchor, constant: -buttonPadding)
])
}
@objc func sortButtonPressed() {
// If clicked it toggles between the up and down arrow
if(directionIsAscending)
{
sortButton.setTitle("↑", for: .normal)
directionIsAscending = false
}
else
{
sortButton.setTitle("↓", for: .normal)
directionIsAscending = true
}
// This fetches the tasks from the database. Depending on the whether its ascending
// descending and the descriptor, the task order may change
data = jcore.tasks.sort(descriptors[descNum], ascending: directionIsAscending).fetch()
reloadData()
}
@objc func descriptorButtonPressed() {
// If the button is at the end of the list, return to the beginning
if(descNum == descriptors.count - 1)
{
descNum = 0
}
else
{
descNum = descNum + 1
}
// Changes the text on the button to the descriptor name
descriptorButton.setTitle(descriptorsText[descNum], for: .normal)
reloadData()
}
}
// this extends the ViewController to implement the UITableViewDelegate/DataSource
// this extension is not necessary and can be implemented above, straight into the controller,
// but this promotes better code readability and organization
extension ViewController: UITableViewDelegate, UITableViewDataSource {
	// datasource function that tells the tableview how many rows to display
	func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
		// we pass the number of objects in our data array,
		// since we need one row per data object
		return data.count
	}
	// datasource function that tells the tableview which cell to display for the current row
	func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
		// we dequeue a cell view from the tableview
		// tableviews reuse cells, to increase performance,
		// especially for large numbers of rows
		let cell = tableView.dequeueReusableCell(withIdentifier: "TaskCell", for: indexPath) as! TaskCell
		// set our cell properties
		let task = data[indexPath.row]
		cell.setTask(task: task)
		// return our cell to the tableview datasource
		return cell
	}
	// delegate function that is called when a user taps on a cell;
	// pushes an edit screen for the tapped task onto the navigation stack
	func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
		// let's deselect this row, so that it doesn't stay selected when we come back
		tableView.deselectRow(at: indexPath, animated: true)
		// guard function against empty data
		guard data.count != 0 else { return }
		// get the task from the data array, using the row that was tapped
		let task = data[indexPath.row]
		// create our detail controller
		let taskDetailViewController = TaskEditViewController()
		taskDetailViewController.task = task
		// push vc onto the nav stack
		navigationController?.pushViewController(taskDetailViewController, animated: true)
	}
	// delegate fuction that will delete task specified by user
	func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCellEditingStyle, forRowAt indexPath: IndexPath) {
		guard data.count != 0 else { return }
		// checks if function passes .delete
		if (editingStyle == .delete) {
			let task = data[indexPath.row]
			// remove task from database
			jcore.remove(task)
			// the backing array must be updated BEFORE deleteRows(at:),
			// otherwise the table view's row count check would fail
			data.remove(at: indexPath.row)
			// animate the row removal
			tableView.deleteRows(at: [indexPath], with: .top)
		}
	}
	// rows are only editable (swipe-to-delete) when there is data to delete
	func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
		return data.count != 0
	}
}
<file_sep>//
// SelectionAnimationController.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// handles the scaling animation of the selection views
// handles the scaling animation of the selection views
class SelectionAnimationController: NSObject, UIViewControllerAnimatedTransitioning {
	// true while presenting; set to false by the owner for the dismissal leg
	var presenting = true
	// optional view that is scaled down/up alongside the fade
	var scalingView: UIView?
	// fixed duration for both the presentation and dismissal animations
	func transitionDuration(using transitionContext: UIViewControllerContextTransitioning?) -> TimeInterval {
		return 0.4
	}
	// performs a cross-fade plus a scale on `scalingView`:
	// presenting: fade the destination in while scaling 0.9 -> 1.0 and dimming the source's tint
	// dismissing: fade the source out while scaling 1.0 -> 0.9 and restoring the tint
	func animateTransition(using transitionContext: UIViewControllerContextTransitioning) {
		guard
			let fromVC = transitionContext.viewController(forKey: UITransitionContextViewControllerKey.from),
			let toVC = transitionContext.viewController(forKey: UITransitionContextViewControllerKey.to)
			else { return }
		let containerView = transitionContext.containerView
		if presenting {
			// destination must be in the container before animating
			containerView.addSubview(toVC.view)
			let endFrame = transitionContext.finalFrame(for: toVC)
			let startFrame = endFrame
			toVC.view.frame = startFrame
			toVC.view.alpha = 0
			scalingView?.setScale(0.9)
			UIView.animate(withDuration: transitionDuration(using: transitionContext), animations: ({
				fromVC.view.tintAdjustmentMode = .dimmed
				toVC.view.alpha = 1
				self.scalingView?.setScale(1)
			})) { _ in
				// respect interactive-transition cancellation on the presenting leg
				transitionContext.completeTransition(!transitionContext.transitionWasCancelled)
			}
		} else {
			UIView.animate(withDuration: transitionDuration(using: transitionContext), animations: ({
				toVC.view.tintAdjustmentMode = .automatic
				fromVC.view.alpha = 0
				self.scalingView?.setScale(0.9)
			})) { _ in
				transitionContext.completeTransition(true)
			}
		}
	}
}
<file_sep>//
// CalendarTaskListView.swift
// Prime Planner
//
// Created by <NAME> on 10/25/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import UIKit
// Task list shown beneath the calendar; displays the tasks that fall
// inside the currently selected date range.
class CalendarTaskListView: UIView, UITableViewDelegate, UITableViewDataSource {
	private let rowHeight: CGFloat = 60
	var date = Date()
	let tableView = UITableView(frame: .zero, style: .plain)
	// owning view controller; used to push the task edit screen
	var parent: ViewControllerCalender!
	var range: (start: Date, end: Date)?
	// tasks currently displayed, fetched from the database for `filterRange`
	var tasks = [Task]()
	init() {
		super.init(frame: .zero)
		backgroundColor = UIColor.clear
		layoutTableView()
		// start out showing today's tasks
		filterTasksInDateRange(Date().beginningOfDay.dayRange)
	}
	required init?(coder aDecoder: NSCoder) {
		fatalError("init(coder:) has not been implemented")
	}
	// pins the table view to fill this view
	func layoutTableView() {
		tableView.translatesAutoresizingMaskIntoConstraints = false
		tableView.backgroundColor = .white
		tableView.showsVerticalScrollIndicator = false
		tableView.delegate = self
		tableView.dataSource = self
		tableView.rowHeight = rowHeight
		tableView.register(TaskCell.self, forCellReuseIdentifier: "TaskCell")
		addSubview(tableView)
		NSLayoutConstraint.activate([
			tableView.widthAnchor.constraint(equalTo: widthAnchor),
			tableView.heightAnchor.constraint(equalTo: heightAnchor),
			tableView.centerXAnchor.constraint(equalTo: centerXAnchor),
			tableView.centerYAnchor.constraint(equalTo: centerYAnchor)
		])
	}
	// removes the task at the given row from the database and the table
	func deleteTaskAtIndexPath(_ indexPath: IndexPath) {
		// get and remove task
		let task = tasks[indexPath.row]
		jcore.remove(task)
		// re-fetch `tasks` WITHOUT reloading, so deleteRows can animate
		// against a consistent row count
		finishFilterTaskOperation(false)
		tableView.deleteRows(at: [indexPath], with: .fade)
	}
	//MARK: actions
	// pending date range; applied by finishFilterTaskOperation
	var filterRange: DateRange?
	// requests a filter to the given range; if the table is scrolled, the
	// actual fetch is deferred until the scroll-to-top animation finishes
	// (see scrollViewDidEndScrollingAnimation)
	func filterTasksInDateRange(_ range: DateRange? = nil) {
		filterRange = range
		if tableView.contentOffset != .zero {
			self.tableView.setContentOffset(.zero, animated: true)
		} else {
			finishFilterTaskOperation()
		}
	}
	// fetches tasks matching `filterRange`; optionally reloads the table
	func finishFilterTaskOperation(_ reload: Bool = true) {
		if let range = filterRange {
			tasks = jcore.tasks.match(range: range).fetch()
			if reload {
				tableView.reloadData()
			}
		}
	}
	// completes a deferred filter once the scroll-to-top animation ends
	func scrollViewDidEndScrollingAnimation(_ scrollView: UIScrollView) {
		DispatchQueue.main.asyncAfter(deadline: .now()) {
			self.finishFilterTaskOperation()
		}
	}
	//MARK: tableview delegate
	func numberOfSections(in tableView: UITableView) -> Int {
		return 1
	}
	func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
		return tasks.count
	}
	func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
		let cell = tableView.dequeueReusableCell(withIdentifier: "TaskCell", for: indexPath) as! TaskCell
		cell.setTask(task: tasks[indexPath.row])
		return cell;
	}
	// opens the edit screen for the tapped task
	func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
		// deselect our cell
		tableView.deselectRow(at: indexPath, animated: true)
		// create our detail controller
		let taskDetailViewController = TaskEditViewController()
		taskDetailViewController.task = tasks[indexPath.row]
		// push vc onto the nav stack
		parent.navigationController?.pushViewController(taskDetailViewController, animated: true)
	}
}
<file_sep>//
// JCalendarViewCell.swift
// JCalendarView
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// A single day cell in the calendar grid: a rounded day-number label
// plus a small colored marker bar beneath it.
class JCalendarViewCell: UICollectionViewCell {
	// the date this cell represents
	var date = Date()
	var colorScheme = JCalendarColorScheme()
	let textLabel = UILabel()
	// color of the marker bar under the day number; setting it repaints the marker
	var markerColor = UIColor.clear {
		didSet {
			marker.backgroundColor = markerColor
		}
	}
	private var marker = UIView()
	required init?(coder aDecoder: NSCoder) {
		fatalError("init(coder:) has not been implemented")
	}
	override init(frame: CGRect) {
		super.init(frame: frame)
		textLabel.translatesAutoresizingMaskIntoConstraints = false
		textLabel.backgroundColor = .clear
		textLabel.textAlignment = .center
		// Fix: cornerRadius was previously assigned twice (20, then 15);
		// only the final value of 15 ever took effect, so the dead
		// assignment has been removed.
		textLabel.layer.cornerRadius = 15
		textLabel.layer.masksToBounds = true
		contentView.addSubview(textLabel)
		NSLayoutConstraint.activate([
			textLabel.widthAnchor.constraint(equalTo: widthAnchor, multiplier: 0.7),
			textLabel.heightAnchor.constraint(equalTo: heightAnchor, multiplier: 0.7),
			textLabel.centerXAnchor.constraint(equalTo: contentView.centerXAnchor),
			textLabel.centerYAnchor.constraint(equalTo: contentView.centerYAnchor)
		])
		marker.translatesAutoresizingMaskIntoConstraints = false
		marker.backgroundColor = .clear
		marker.layer.cornerRadius = 2
		marker.layer.masksToBounds = false
		contentView.addSubview(marker)
		NSLayoutConstraint.activate([
			marker.widthAnchor.constraint(equalTo: textLabel.widthAnchor, multiplier: 0.5),
			marker.heightAnchor.constraint(equalToConstant: 4),
			marker.centerXAnchor.constraint(equalTo: contentView.centerXAnchor),
			marker.topAnchor.constraint(equalTo: textLabel.bottomAnchor, constant: 1)
		])
	}
	// full opacity, marker visible — used for days inside the current month/week
	func disableFade() {
		marker.isHidden = false
		alpha = 1
	}
	// dimmed, marker hidden — used for leading/trailing days of adjacent months
	func enableFade() {
		marker.isHidden = true
		alpha = 0.3
	}
	// highlights this cell; today's date uses the `today` color,
	// any other date uses the normal selection color
	func select() {
		guard textLabel.backgroundColor == .clear else { return }
		textLabel.backgroundColor = date.hasSameDay(asDate: Date()) ? colorScheme.today : colorScheme.selection
		textLabel.textColor = colorScheme.selectionText
	}
	// clears the highlight and restores the normal text color
	func deselect() {
		guard textLabel.backgroundColor != .clear else { return }
		textLabel.backgroundColor = .clear
		textLabel.textColor = date.hasSameDay(asDate: Date()) ? colorScheme.today : colorScheme.text
	}
}
<file_sep>//
// Prime_PlannerTests.swift
// Prime PlannerTests
//
// Created by <NAME> on 9/6/18.
// Copyright © 2018 Poods. All rights reserved.
//
import XCTest
@testable import Prime_Planner
// Default XCTest case for the app target; currently contains only the
// standard lifecycle hooks with no custom setup or teardown.
class Prime_PlannerTests: XCTestCase {
	override func setUp() {
		super.setUp()
		// Per-test setup would go here; nothing is required yet.
	}
	override func tearDown() {
		// Per-test cleanup would go here; nothing is required yet.
		super.tearDown()
	}
}
<file_sep># Requirements #
1. The Software will run on iOS applications
* Specifically iOS 8 and above
2. Priority is either Low, Medium, or High
3. A priority is a drop down menu item
5. Priority level is seen on the side of the tasks
6. Priority level is denoted by exclamation marks (High = !!!, med = !!, low = !)
7. A Title is a string
8. A description is a string
9. A due date is formatted as DD/MM/YY
10. Each Task will consist of a due date, a creation date, a title, description, and priority
* A User should be able to set each one of these fields
11. A User will be able to check a checkbox next to each task to signify completion
12. A User should be able to add a new task to the checklist
13. A User should be able to delete an existing task from the todo list
14. A User should see tabs at the bottom of the screen
15. A User should be able to navigate to different views when clicking on the tabs
16. A User should be able to see the task titles on the checklist screen
17. A user should be able to view more details about a task when the task is clicked
18. A user should be able to close the application and reopen the application and still see previously made tasks
19. A User should be able to click on a task and choose to edit that task
<file_sep>//
// TaskCell.swift
// Prime Planner
//
// Created by <NAME> on 9/12/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import Foundation
// Table cell representing a single Task: a completion checkbox on the
// left, the task name and (optional) due date, and the priority symbol
// as the right-hand detail text.
class TaskCell: UITableViewCell {
	// declare our checkbox class
	let checkbox = Checkbox()
	// keep track of the task connected to this cell
	var task: Task!
	// this is a block handler that we will use for the checkbox button action
	// the first pair of parenthesis hold the parameters from the call being passed to the block.
	// the second pair of parenthesis is the return from the block being passed back to the call.
	// Both are empty because we are not passing / returning anything as of yet.
	var checkboxAction: ((Task, Bool) -> ())?
	// attribute labels
	var leftView = UIView()
	var nameLabel = UILabel()
	var dueDateLabel = UILabel()
	required init?(coder aDecoder: NSCoder) { fatalError() }
	// layout the checkbox when the cell is initialized
	override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
		// always use .value1 so detailTextLabel (priority symbol) is available
		super.init(style: .value1, reuseIdentifier: reuseIdentifier)
		// set an indentation, just to move over all the default content of the cell to fit our checkbox
		indentationLevel = 5
		// order matters: leftView is constrained against the checkbox and
		// sized from the labels, so those are laid out first
		layoutCheckbox()
		layoutNameLabel()
		layoutDueDateLabel()
		layoutLeftView()
	}
	// container for the name/due-date labels, anchored next to the checkbox
	private func layoutLeftView() {
		leftView.translatesAutoresizingMaskIntoConstraints = false
		contentView.addSubview(leftView)
		NSLayoutConstraint.activate([
			leftView.widthAnchor.constraint(equalTo: contentView.widthAnchor, multiplier: 0.75),
			leftView.heightAnchor.constraint(equalToConstant: nameLabel.frame.size.height + dueDateLabel.frame.size.height),
			leftView.leadingAnchor.constraint(equalTo: checkbox.trailingAnchor, constant: separatorInset.left),
			leftView.centerYAnchor.constraint(equalTo: contentView.centerYAnchor)
		])
	}
	private func layoutNameLabel() {
		nameLabel.translatesAutoresizingMaskIntoConstraints = false
		// temporarily set placeholder text so sizeToFit yields the label's
		// one-line height (used by layoutLeftView), then clear it
		nameLabel.text = "Sizing Height Text"
		nameLabel.sizeToFit()
		nameLabel.text = nil
		leftView.addSubview(nameLabel)
		NSLayoutConstraint.activate([
			nameLabel.widthAnchor.constraint(equalTo: leftView.widthAnchor),
			nameLabel.leadingAnchor.constraint(equalTo: leftView.leadingAnchor),
			nameLabel.topAnchor.constraint(equalTo: leftView.topAnchor)
		])
	}
	private func layoutDueDateLabel() {
		dueDateLabel.translatesAutoresizingMaskIntoConstraints = false
		dueDateLabel.textColor = UIColor(white: 0.4, alpha: 1.0)
		dueDateLabel.font = UIFont.systemFont(ofSize: 15)
		// same height-sizing trick as layoutNameLabel
		dueDateLabel.text = "Sizing Height Text"
		dueDateLabel.sizeToFit()
		dueDateLabel.text = nil
		leftView.addSubview(dueDateLabel)
		NSLayoutConstraint.activate([
			dueDateLabel.widthAnchor.constraint(equalTo: leftView.widthAnchor),
			dueDateLabel.leadingAnchor.constraint(equalTo: leftView.leadingAnchor),
			dueDateLabel.topAnchor.constraint(equalTo: nameLabel.bottomAnchor)
		])
	}
	// layout and set proper constraints for the checkbox
	private func layoutCheckbox() {
		let checkboxSize: CGFloat = 35
		checkbox.translatesAutoresizingMaskIntoConstraints = false
		checkbox.addTarget(self, action: #selector(checkBoxTapped), for: .touchUpInside)
		contentView.addSubview(checkbox)
		NSLayoutConstraint.activate([
			checkbox.widthAnchor.constraint(equalToConstant: checkboxSize),
			checkbox.heightAnchor.constraint(equalToConstant: checkboxSize),
			checkbox.leadingAnchor.constraint(equalTo: contentView.leadingAnchor, constant: separatorInset.left),
			checkbox.centerYAnchor.constraint(equalTo: contentView.centerYAnchor)
		])
	}
	// toggles the check image and calls the action
	@objc func checkBoxTapped() {
		checkbox.checkBoxTapped()
		checkboxAction?(task, checkbox.isChecked)
	}
	// sets the task property of this cell as well as the name
	func setTask(task: Task){
		// set the task for this cell
		self.task = task
		// set task name and priority
		// tasks WITH a due date use the custom two-line labels; tasks
		// without one fall back to the cell's built-in textLabel —
		// the unused set is cleared because cells are reused
		if let date = task.dueDate {
			textLabel?.text = nil
			nameLabel.text = task.name
			dueDateLabel.text = date.string
		} else {
			textLabel?.text = task.name
			nameLabel.text = nil
			dueDateLabel.text = nil
		}
		detailTextLabel?.text = task.priority.symbol
		detailTextLabel?.textColor = task.priority.color
		// set checkbox options
		checkbox.isChecked = task.isChecked
		// persist completion state whenever the checkbox is toggled
		checkboxAction = { task, isChecked in
			task.isChecked = isChecked
			jcore.save()
		}
	}
}
<file_sep>//
// SelectionItems.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
// The kinds of selection screens supported (note entry, calendar
// date picker, priority picker).
enum SelectionType {
	case note
	case calendar
	case priority
}
// Wraps a single selectable value for display in a selection screen.
class SelectionItem {
	// object that is 'selected'
	var object: Any?
	// human-readable title for the wrapped object; currently only
	// TaskPriority values have a title, everything else yields nil
	var title: String? {
		guard let priority = object as? TaskPriority else { return nil }
		return priority.string
	}
	// init with a selected object
	init(object: Any?) {
		self.object = object
	}
	// default init
	init() { }
}
// Collection of SelectionItem values backing a selection screen.
// Only the .priority type represents an actual list; the other types
// report zero items.
class SelectionItems {
	var type: SelectionType
	var priorityTypes: [TaskPriority] = [.none, .low, .medium, .high]
	// init with selection type (note, calendar, priority)
	init(type: SelectionType) {
		self.type = type
	}
	// number of items represented, or 0 for non-list selection types
	var count: Int {
		if type == .priority {
			return priorityTypes.count
		}
		return 0
	}
	// the selection item at the given index; the wrapped object is
	// only populated for the priority selection
	subscript(index: Int) -> SelectionItem {
		let item = SelectionItem()
		if type == .priority {
			item.object = priorityTypes[index]
		}
		return item
	}
}
<file_sep>//
// File.swift
// Prime Planner
//
// Created by <NAME> on 10/3/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
<file_sep>//
// AppTheme.swift
// Prime Planner
//
// Created by <NAME> on 11/3/18.
// Copyright © 2018 Poods. All rights reserved.
//
import UIKit
import Foundation
// Central place for the application's accent color.
class AppTheme {
	// light-green tint used across the app
	class func color() -> UIColor {
		return UIColor(red: 0.60, green: 1.0, blue: 0.60, alpha: 1.0)
	}
}
<file_sep>//
// JCore.swift
// JCore
//
// Created by <NAME> on 9/6/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import CoreData
/// retrieves the Core Database shared instance.
var jcore = JCore.shared
// Thin wrapper around a Core Data NSPersistentContainer providing
// save / insert / remove / fetch-request helpers. Accessed app-wide
// through the `jcore` shared instance.
class JCore {
	// initialize our core database and assign it to our shared variable
	static var shared = JCore()
	let container: NSPersistentContainer
	// getters for the context and coordinator
	var context: NSManagedObjectContext {
		return container.viewContext
	}
	var coordinator: NSPersistentStoreCoordinator {
		return container.persistentStoreCoordinator
	}
	init() {
		// initialize our container (model file must be named "JCore")
		container = NSPersistentContainer(name: "JCore")
		// load the persistent store; a failure here is unrecoverable
		container.loadPersistentStores { (description, error) in
			if let error = error as NSError? {
				fatalError("Unresolved error \(error), \(error.userInfo)")
			}
		}
		// setup the context allowing automatic merges from the parent
		context.automaticallyMergesChangesFromParent = true
	}
	/// Updates the context with any pending changes and saves into the database. Context defaults to the main context.
	func save(context: NSManagedObjectContext = JCore.shared.context) {
		if context.hasChanges {
			// try to save the context
			do {
				try context.save()
			} catch let error as NSError {
				// an error could occur in the case of low space or memory on the device
				fatalError("Unresolved error \(error), \(error.userInfo)")
			}
			// if we passed in a context other than the main context, update the main context afterwards
			// (recursive call with the default argument saves the main context)
			if context != JCore.shared.context {
				save()
			}
		}
	}
	/// Inserts a new object with the passed Type into the context and returns it.
	/// Note: the caller is responsible for calling save() afterwards.
	func new<T: NSManagedObject>(_ type: T.Type) -> T {
		// entity name is the class name with the module prefix stripped
		let name = NSStringFromClass(type).components(separatedBy: ".").last!
		return NSEntityDescription.insertNewObject(forEntityName: name, into: context) as! T
	}
	/// Removes an object from the context, thus removing it from the database.
	/// Passing nil is a no-op. Saves immediately.
	func remove(_ object: NSManagedObject?) {
		guard let object = object else { return }
		context.delete(object)
		save()
	}
	/**
	Begins a request for every object in the database that is associated with the given type.
	- Parameter type: The object type that is being requested from the database
	- Returns: A `JCoreRequest` representing the objects for the given type.
	*/
	func data<T: NSManagedObject>(_ type: T.Type) -> JCoreRequest<T> {
		// get the class name from the passed type as a string
		let className = String(describing: type)
		// create a request with the class name (corresponds to the entity in our data model)
		let request = NSFetchRequest<T>(entityName: className)
		request.returnsObjectsAsFaults = false
		// return a new core request with our fetch request
		return JCoreRequest<T>(request: request)
	}
}
<file_sep>//
// Extensions-Convenience.swift
// Prime Planner
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import UIKit
extension UIView {
	// Uniformly scales the view about its center via its transform.
	// NOTE(review): the `center` parameter is currently unused; it is
	// kept for source compatibility with existing callers.
	func setScale(_ scale: CGFloat, center: Bool = true) {
		transform = CGAffineTransform(scaleX: scale, y: scale)
	}
	// Rotates the view by the given angle (radians) via its transform.
	func setRotation(angle: CGFloat) {
		transform = CGAffineTransform(rotationAngle: angle)
	}
	// Rounds the view's corners; enables masking so content is clipped.
	func setRadius(_ radius: CGFloat) {
		layer.masksToBounds = true
		layer.cornerRadius = radius
	}
	// Draws a border of the given width and color around the view.
	func setBorder(_ width: CGFloat, color: UIColor) {
		layer.borderWidth = width
		layer.borderColor = color.cgColor
	}
}
extension UIImage {
	// returns a copy of the image with every opaque pixel filled with
	// the given color (the image's alpha channel is used as a mask)
	func image(withColor color: UIColor) -> UIImage {
		UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale)
		color.setFill()
		let context = UIGraphicsGetCurrentContext()
		// flip the context vertically: CoreGraphics' origin is bottom-left
		// while UIKit's is top-left
		context?.translateBy(x: 0, y: self.size.height)
		context?.scaleBy(x: 1.0, y: -1.0)
		context?.setBlendMode(CGBlendMode.normal)
		let rect = CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height)
		// clip to the image's alpha mask, then flood-fill with the color
		context?.clip(to: rect, mask: self.cgImage!)
		context?.fill(rect)
		let newImage = UIGraphicsGetImageFromCurrentImageContext()
		UIGraphicsEndImageContext()
		return newImage!
	}
}
<file_sep>//
// SelectionViewController~TableView.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// Table delegate/datasource for the selection screen: renders the
// selectable items, checkmarks the currently selected one, and
// dismisses with the tapped item.
extension SelectionViewController: UITableViewDelegate, UITableViewDataSource {
	func numberOfSections(in tableView: UITableView) -> Int {
		// an extra leading section can host a "no item" row
		return showsNoItemSection ? 2 : 1
	}
	func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
		if section == 0 && showsNoItemSection { return 1 }
		return items.count
	}
	func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
		// manual dequeue/create since no cell class is registered
		var cell: UITableViewCell! = tableView.dequeueReusableCell(withIdentifier: "Cell")
		if cell == nil {
			cell = UITableViewCell(style: .default, reuseIdentifier: "Cell")
			cell.selectionStyle = .none
		}
		let name = items[indexPath.row].title
		cell.textLabel?.text = name
		// reset reused-cell state before configuring
		cell.contentView.alpha = 1
		cell.isUserInteractionEnabled = true
		cell.accessoryType = .none
		cell.backgroundColor = UIColor.clear
		cell.contentView.backgroundColor = UIColor.clear
		cell.textLabel?.textColor = .black
		// checkmark the row matching the current selection (priority only)
		if let object = selected.object as? TaskPriority, name == object.string {
			cell.accessoryType = .checkmark
		}
		return cell
	}
	// shrink slightly on touch-down for tactile feedback
	func tableView(_ tableView: UITableView, didHighlightRowAt indexPath: IndexPath) {
		UIView.animate(withDuration: 0.15) {
			tableView.cellForRow(at: indexPath)?.setScale(0.97)
		}
	}
	// restore scale on touch-up/cancel
	func tableView(_ tableView: UITableView, didUnhighlightRowAt indexPath: IndexPath) {
		UIView.animate(withDuration: 0.15) {
			tableView.cellForRow(at: indexPath)?.setScale(1.0)
		}
	}
	// tapping a row confirms that item and dismisses the screen
	func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
		dismiss(items[indexPath.row], cancelled: false)
	}
}
<file_sep>//
// JCalendarView.swift
// JCalendarView
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// Callbacks from a calendar page to its owner: date selection and
// height changes (the page's height depends on its number of weeks).
protocol JCalendarPageDelegate {
	func calendarPage(_ page: JCalendarPage, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool)
	func calendarPage(_ page: JCalendarPage, willUpdateHeight height: CGFloat)
}
// Supplies per-date marker colors for a calendar page;
// return nil for dates without a marker.
protocol JCalendarPageDataSource {
	func calendarPage(_ calendarPage: JCalendarPage, markerColorForDate date: Date) -> UIColor?
}
// One page of the calendar: either a full month grid or a single week
// row, rendered as a 7-column collection view. Day numbers from the
// adjacent months are prepended/appended (faded) to fill full weeks.
class JCalendarPage: UIView {
	var date: Date
	var colorScheme = JCalendarColorScheme()
	var delegate: JCalendarPageDelegate?
	var dataSource: JCalendarPageDataSource?
	let collectionView: UICollectionView
	private let numColumns = 7
	// day numbers to render, including padding days from adjacent months
	private var days = [Int]()
	// counts of padding days before (prepend), in (add), and after (append)
	// the current month/week — used to map indices back to dates
	private var numberOfDays: (prepend: Int, add: Int, append: Int) = (0, 0, 0)
	private var horizontalGridLines = [UIView]()
	private var heightConstraint: NSLayoutConstraint!
	private var numberOfWeeks: Int {
		return isWeekly ? 1 : date.numberOfWeeksInMonth
	}
	private var selectedCell: JCalendarViewCell?
	private var isWeekly: Bool
	// total page height: square cells, one row per displayed week
	var pageHeight: CGFloat {
		return (collectionView.bounds.size.width / CGFloat(numColumns)).rounded(.down) * CGFloat(numberOfWeeks)
	}
	// changes the displayed date; the grid is rebuilt only when the new
	// date falls in a different month (monthly) or week (weekly),
	// or when forced
	func setDate(_ date: Date, forceUpdateLayout: Bool = false) {
		let oldValue = self.date
		self.date = date
		if forceUpdateLayout || (!isWeekly && !date.hasSameMonth(asDate: oldValue) || (isWeekly && !date.contained(in: oldValue.weekRange))) {
			updateLayout()
		}
	}
	init(date: Date, isWeekly: Bool) {
		self.isWeekly = isWeekly
		self.date = date
		let flow = UICollectionViewFlowLayout()
		flow.scrollDirection = .vertical
		flow.minimumLineSpacing = 0
		flow.minimumInteritemSpacing = 0
		collectionView = UICollectionView(frame: .zero, collectionViewLayout: flow)
		super.init(frame: .zero)
		loadData()
		layoutCollectionView()
	}
	required init?(coder aDecoder: NSCoder) {
		fatalError("init(coder:) has not been implemented")
	}
	// pins the collection view to the top of this view; its height is
	// driven by heightConstraint (updated in updateHeightConstraint)
	func layoutCollectionView() {
		collectionView.backgroundColor = UIColor.clear
		collectionView.register(JCalendarViewCell.self, forCellWithReuseIdentifier: "Cell")
		collectionView.delegate = self
		collectionView.dataSource = self
		collectionView.translatesAutoresizingMaskIntoConstraints = false
		addSubview(collectionView)
		heightConstraint = collectionView.heightAnchor.constraint(equalToConstant: 0)
		NSLayoutConstraint.activate([
			collectionView.widthAnchor.constraint(equalTo: widthAnchor),
			collectionView.centerXAnchor.constraint(equalTo: centerXAnchor),
			heightConstraint,
			collectionView.topAnchor.constraint(equalTo: topAnchor)
		])
	}
	// rebuilds the day data, resizes, and reloads the grid
	private func updateLayout() {
		loadData()
		updateHeightConstraint()
		collectionView.reloadData()
	}
	// recompute the height when attached to a window (bounds are valid then)
	override func willMove(toWindow newWindow: UIWindow?) {
		if let _ = newWindow {
			updateHeightConstraint()
		}
	}
	private func updateHeightConstraint() {
		let size = pageHeight
		guard heightConstraint.constant != size else { return }
		// activate height constraint if necessary
		heightConstraint.constant = size
		delegate?.calendarPage(self, willUpdateHeight: size)
	}
	// Populates `days` and `numberOfDays`. Weekly pages show one week,
	// padded with adjacent-month days when the week straddles a month
	// boundary; monthly pages show the whole month padded to full weeks.
	private func loadData() {
		selectedCell = nil
		days.removeAll()
		if isWeekly {
			// add days before week if necessary
			if (!date.beginningOfWeek.contained(in: date.monthRange)) {
				let firstDayInMonth = date.beginningOfMonth
				let firstWeekdayInMonth = firstDayInMonth.weekday
				let lastMonth = date.adding(month: -1)
				let numberOfDaysLastMonth = lastMonth.numberOfDaysInMonth
				numberOfDays.prepend = firstWeekdayInMonth - Calendar.current.firstWeekday
				for i in 0..<numberOfDays.prepend {
					days.insert(numberOfDaysLastMonth - i, at: 0)
				}
			}
			// add days in week
			var currentDateInWeek = date.beginningOfWeek
			for _ in 0..<7 {
				guard currentDateInWeek.contained(in: date.monthRange) else {
					currentDateInWeek = currentDateInWeek.adding(day: 1)
					continue
				}
				numberOfDays.add += 1
				days.append(currentDateInWeek.day)
				currentDateInWeek = currentDateInWeek.adding(day: 1)
			}
			// add days after month if necessary
			if (!date.endOfWeek.contained(in: date.monthRange)) {
				let lastDayInMonth = date.endOfMonth
				let lastWeekdayInMonth = lastDayInMonth.weekday
				numberOfDays.append = Calendar.current.firstWeekday + Calendar.current.weekdaySymbols.count - 1 - lastWeekdayInMonth
				for i in 0..<numberOfDays.append {
					days.append(i + 1)
				}
			}
		} else {
			// add days of month
			numberOfDays.add = date.numberOfDaysInMonth
			for i in 1...numberOfDays.add {
				days.append(i)
			}
			// add days before month if necessary
			let firstDayInMonth = date.beginningOfMonth
			let firstWeekdayInMonth = firstDayInMonth.weekday
			let lastMonth = date.adding(month: -1)
			let numberOfDaysLastMonth = lastMonth.numberOfDaysInMonth
			numberOfDays.prepend = firstWeekdayInMonth - Calendar.current.firstWeekday
			for i in 0..<numberOfDays.prepend {
				days.insert(numberOfDaysLastMonth - i, at: 0)
			}
			// add days after month if necessary
			let lastDayInMonth = date.endOfMonth
			let lastWeekdayInMonth = lastDayInMonth.weekday
			numberOfDays.append = Calendar.current.firstWeekday + Calendar.current.weekdaySymbols.count - 1 - lastWeekdayInMonth
			for i in 0..<numberOfDays.append {
				days.append(i + 1)
			}
		}
	}
	// selects the cell for a day of the current month without notifying
	// the delegate (item index = prepend padding + day - 1)
	func select(day: Int) {
		var date = self.date
		date.set(day: day)
		select(cell: collectionView.cellForItem(at: IndexPath(item: numberOfDays.prepend + day - 1, section: 0)) as? JCalendarViewCell, alertDelegate: false)
	}
	func select(date: Date) {
		select(day: date.day)
	}
	// advances to the next month/week; if the next section contains
	// today, it lands on today rather than the section's first day
	func setDate(toSectionAfterDate date: Date) {
		var dateAfter: Date
		if !isWeekly {
			// setup next month
			dateAfter = date.adding(month: 1)
			if dateAfter.hasSameMonth(asDate: Date()) {
				dateAfter = Date()
			} else {
				dateAfter = dateAfter.beginningOfMonth
			}
		} else {
			// setup next n weeks
			dateAfter = date.adding(week: 1)
			if dateAfter.weekRange.includes(date: Date()) {
				dateAfter = Date()
			} else {
				dateAfter = dateAfter.beginningOfWeek
			}
		}
		setDate(dateAfter)
	}
	// mirror of setDate(toSectionAfterDate:) going backwards
	func setDate(toSectionBeforeDate date: Date) {
		var dateBefore: Date
		if !isWeekly {
			// setup previous month
			dateBefore = date.adding(month: -1)
			if dateBefore.hasSameMonth(asDate: Date()) {
				dateBefore = Date()
			} else {
				dateBefore = dateBefore.beginningOfMonth
			}
		} else {
			// setup previous n weeks
			dateBefore = date.adding(week: -1)
			if dateBefore.weekRange.includes(date: Date()) {
				dateBefore = Date()
			} else {
				dateBefore = dateBefore.beginningOfWeek
			}
		}
		setDate(dateBefore)
	}
}
// Collection view wiring for the day grid: one square cell per entry
// in `days`, with weekend-column shading and marker colors from the
// data source.
extension JCalendarPage: UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
	func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
		return days.count
	}
	func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
		let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "Cell", for: indexPath) as! JCalendarViewCell
		let i = indexPath.item
		let day = days[i]
		cell.clipsToBounds = false
		cell.markerColor = .clear
		cell.textLabel.text = "\(day)"
		// shade the first and last columns of each 7-day row
		if i % 7 == 0 || (i + 1) % 7 == 0 {
			cell.contentView.backgroundColor = UIColor(white: 0.95, alpha: 1)
		} else {
			cell.contentView.backgroundColor = .clear
		}
		// padding cells (before/after the current month) are faded and
		// dated into the adjacent month
		var monthDate = date
		if i < numberOfDays.prepend || i >= numberOfDays.prepend + numberOfDays.add {
			cell.enableFade()
			monthDate = date.adding(
				month: i < numberOfDays.prepend ? -1 : 1
			)
		} else {
			cell.disableFade()
		}
		monthDate.set(day: day)
		cell.date = monthDate
		cell.textLabel.textColor = cell.date.hasSameDay(asDate: Date()) ? colorScheme.today : colorScheme.text
		cell.deselect()
		// re-select the cell matching the page's current date (no delegate callback)
		let flatDate = cell.date.beginningOfDay
		if flatDate == date.beginningOfDay {
			select(cell: cell, alertDelegate: false)
		}
		if let color = self.dataSource?.calendarPage(self, markerColorForDate: flatDate) {
			cell.markerColor = color
		}
		return cell
	}
	// selection happens on highlight (touch-down) rather than didSelect
	func collectionView(_ collectionView: UICollectionView, didHighlightItemAt indexPath: IndexPath) {
		select(cell: collectionView.cellForItem(at: indexPath) as? JCalendarViewCell, alertDelegate: true)
	}
	// square cells sized so 7 fit across the width
	func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
		let itemWidth = (collectionView.bounds.size.width / CGFloat(numColumns)).rounded(.down)
		return CGSize(width: itemWidth, height: itemWidth)
	}
	// moves the selection highlight to `cell`; optionally notifies the
	// delegate (with isReselecting set when the cell was already selected)
	func select(cell: JCalendarViewCell?, alertDelegate: Bool) {
		guard let cell = cell else { return }
		let isReselecting = selectedCell == cell
		if !isReselecting {
			cell.select()
			selectedCell?.deselect()
			selectedCell = cell
		}
		if alertDelegate {
			delegate?.calendarPage(self, didSelectDate: cell.date, selectedAutomatically: false, isReselecting: isReselecting)
		}
	}
}
<file_sep>//
// CoreDataTests.swift
// Prime PlannerTests
//
// Created by <NAME> on 9/19/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import XCTest
@testable import Prime_Planner
// Round-trip tests for the JCore Core Data layer: insert a Task,
// fetch it back by id, remove it, and verify it is gone.
class CoreDataTests: XCTestCase {
	func testInsertionRemoval() {
		let task = testInsertTask()
		testRemoveTask(task)
	}
	// NOTE(review): XCTest only auto-runs parameterless `test...` methods
	// returning Void; this helper (and testRemoveTask) is exercised via
	// testInsertionRemoval rather than discovered on its own.
	func testInsertTask() -> Task {
		// create a test task
		let task = Task(name: "Hello, World!")
		task.creationDate = Date()
		task.note = "This is a note."
		task.priority = .low
		// insert the task into the database
		jcore.save()
		// save the id
		let id = task.id
		// get task from database
		let getTask = jcore.tasks.match(id: id).fetchFirst()
		// check if the task exists with the specified name
		XCTAssertNotNil(getTask)
		return getTask!
	}
	func testRemoveTask(_ task: Task) {
		// get task id
		let id = task.id
		// get task from database
		let task = jcore.tasks.match(id: id).fetchFirst()
		// check if the task exists with the specified name
		XCTAssertNotNil(task)
		// remove the task from the database
		jcore.remove(task)
		// check if the task is nil (was removed properly)
		XCTAssertNil(jcore.tasks.match(id: id).fetchFirst())
	}
	// measures the time of a full insert + remove cycle
	func testPerformanceInsertionRemoval() {
		measure {
			self.testInsertionRemoval()
		}
	}
}
<file_sep>//
// JCalendarPageViewController.swift
// JCalendarView
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// Colors used when drawing calendar day cells.
struct JCalendarColorScheme {
	// normal day-number text color
	var text: UIColor = .black
	// color used for today's date (text and selection highlight)
	var today: UIColor = .red
	// background color of the selected day's highlight
	var selection: UIColor = .black
	// text color inside the selection highlight
	var selectionText: UIColor = .white
	init() { }
}
// Paging direction for calendar navigation.
enum JDirection {
	case none, backwards, forwards
}
// Callbacks from the calendar to its owner: date selection and
// height changes.
protocol JCalendarDelegate {
	func calendar(_ calendar: JCalendar, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool)
	func calendar(_ calendar: JCalendar, willUpdateHeight height: CGFloat)
}
// Supplies per-date marker colors for the calendar;
// return nil for dates without a marker.
protocol JCalendarDataSource {
	func calendar(_ calendar: JCalendar, markerColorForDate date: Date) -> UIColor?
}
/// A paging month/week calendar view built from a header (month label + Today
/// button), a weekday row, and a horizontally paging scroll view holding three
/// JCalendarPage instances (previous / current / next). The scroll view is
/// always re-centered on the middle page after a swipe, and the page dates are
/// re-seeded — an infinite-carousel pattern.
class JCalendar: UIView {
	/// Receives selection and height-change events.
	var delegate: JCalendarDelegate?
	/// Supplies per-date marker colors.
	var dataSource: JCalendarDataSource?
	/// The collection view of the center (currently visible) page.
	var collectionView: UICollectionView {
		return page2.collectionView
	}
	/// Format string for the header label; changing it reformats the label.
	var dateFormat = "MMMM yyyy" {
		didSet {
			setDateViewLabelDate(date)
		}
	}
	/// The date the calendar is currently centered on; changing it
	/// re-seeds all three pages around the new value.
	private var date: Date {
		didSet {
			guard date != oldValue else { return }
			resetPageDates()
		}
	}
	private let colorScheme = JCalendarColorScheme()
	private let scrollView = UIScrollView()
	private let weekView = UIView()
	private let dateView = UIView()
	private let dateViewLabel = UILabel()
	private let todayButton = UIButton()
	// transient state while an animated selection / page change is in flight
	private var selectingDate: Date?
	private var targetPage: Int?
	private var heightPrepared = false
	private var selectedPageAutomatically = true
	// three-page carousel: previous / current / next section (month or week)
	private let page1: JCalendarPage
	private let page2: JCalendarPage
	private let page3: JCalendarPage
	/// Combined height of the date header and weekday row.
	var headerHeight: CGFloat
	/// true = one-week pages, false = one-month pages.
	var isWeekly: Bool
	required init?(coder: NSCoder) {
		fatalError("init(coder:) has not been implemented")
	}
	/// Seeds the three pages around `date`. Neighboring sections snap to
	/// today when today falls inside them, otherwise to the section start.
	init(date: Date = Date(), headerHeight: CGFloat = 88, isWeekly: Bool = false) {
		self.date = date
		self.headerHeight = headerHeight
		self.isWeekly = isWeekly
		var dateBefore: Date
		var dateAfter: Date
		if !isWeekly {
			// setup next month
			dateAfter = date.adding(month: 1)
			if dateAfter.hasSameMonth(asDate: Date()) {
				dateAfter = Date()
			} else {
				dateAfter = dateAfter.beginningOfMonth
			}
			// setup previous month
			dateBefore = date.adding(month: -1)
			if dateBefore.hasSameMonth(asDate: Date()) {
				dateBefore = Date()
			} else {
				dateBefore = dateBefore.beginningOfMonth
			}
		} else {
			// setup next week
			dateAfter = date.adding(week: 1)
			if dateAfter.weekRange.includes(date: Date()) {
				dateAfter = Date()
			} else {
				dateAfter = dateAfter.beginningOfWeek
			}
			// setup previous week
			dateBefore = date.adding(week: -1)
			if dateBefore.weekRange.includes(date: Date()) {
				dateBefore = Date()
			} else {
				dateBefore = dateBefore.beginningOfWeek
			}
		}
		// create pages
		page1 = JCalendarPage(date: dateBefore, isWeekly: isWeekly)
		page2 = JCalendarPage(date: date, isWeekly: isWeekly)
		page3 = JCalendarPage(date: dateAfter, isWeekly: isWeekly)
		super.init(frame: .zero)
		backgroundColor = .white
		// setup page delegates
		page1.delegate = self
		page2.delegate = self
		page3.delegate = self
		// setup page data sources
		page1.dataSource = self
		page2.dataSource = self
		page3.dataSource = self
		// layout our calendar and pages
		layoutDateView()
		layoutWeekView()
		layoutScrollView()
		addPages()
	}
	override func willMove(toWindow newWindow: UIWindow?) {
		super.willMove(toWindow: newWindow)
		// once attached to a window, size the scroll content to three pages
		// wide and center on the middle page
		if let _ = newWindow {
			scrollView.layoutIfNeeded()
			scrollView.contentSize = CGSize(width: scrollView.frame.size.width * (3), height: scrollView.frame.size.height)
			goToPage(1)
		}
	}
	/// Configures the paging scroll view and pins it below the weekday row.
	private func layoutScrollView() {
		scrollView.delegate = self
		scrollView.panGestureRecognizer.maximumNumberOfTouches = 1
		scrollView.translatesAutoresizingMaskIntoConstraints = false
		scrollView.isPagingEnabled = true
		scrollView.bounces = false
		scrollView.showsHorizontalScrollIndicator = false
		addSubview(scrollView)
		NSLayoutConstraint.activate([
			scrollView.widthAnchor.constraint(equalTo: widthAnchor),
			scrollView.topAnchor.constraint(equalTo: weekView.bottomAnchor),
			scrollView.centerXAnchor.constraint(equalTo: centerXAnchor),
			scrollView.bottomAnchor.constraint(equalTo: bottomAnchor)
		])
	}
	/// Builds the header row: month/year label plus the trailing Today button.
	private func layoutDateView() {
		// layout dateview (top half of the header)
		dateView.translatesAutoresizingMaskIntoConstraints = false
		addSubview(dateView)
		NSLayoutConstraint.activate([
			dateView.widthAnchor.constraint(equalTo: widthAnchor),
			dateView.heightAnchor.constraint(equalToConstant: headerHeight / 2),
			dateView.topAnchor.constraint(equalTo: topAnchor),
			dateView.centerXAnchor.constraint(equalTo: centerXAnchor)
		])
		// layout label
		dateViewLabel.translatesAutoresizingMaskIntoConstraints = false
		setDateViewLabelDate(date)
		dateViewLabel.font = UIFont.systemFont(ofSize: 22)
		dateViewLabel.textAlignment = .center
		dateView.addSubview(dateViewLabel)
		NSLayoutConstraint.activate([
			dateViewLabel.widthAnchor.constraint(equalTo: dateView.widthAnchor),
			dateViewLabel.heightAnchor.constraint(equalTo: dateView.heightAnchor),
			dateViewLabel.centerXAnchor.constraint(equalTo: dateView.centerXAnchor),
			dateViewLabel.centerYAnchor.constraint(equalTo: dateView.centerYAnchor)
		])
		// Today button: highlight on touch-down/drag-enter, restore on exit/cancel
		todayButton.translatesAutoresizingMaskIntoConstraints = false
		todayButton.setTitle("Today", for: .normal)
		todayButton.setTitleColor(colorScheme.today, for: .normal)
		todayButton.titleLabel?.font = UIFont.boldSystemFont(ofSize: 16)
		todayButton.sizeToFit()
		todayButton.addTarget(self, action: #selector(todayButtonPressed(_:)), for: [.touchUpInside])
		todayButton.addTarget(self, action: #selector(todayButtonExit(_:)), for: [.touchDragExit, .touchCancel])
		todayButton.addTarget(self, action: #selector(todayButtonDown(_:)), for: [.touchDown, .touchDragEnter])
		dateView.addSubview(todayButton)
		NSLayoutConstraint.activate([
			todayButton.trailingAnchor.constraint(equalTo: dateView.trailingAnchor, constant: -20),
			todayButton.centerYAnchor.constraint(equalTo: dateView.centerYAnchor)
		])
	}
	/// Builds the row of seven weekday-symbol labels under the header.
	private func layoutWeekView() {
		weekView.translatesAutoresizingMaskIntoConstraints = false
		addSubview(weekView)
		NSLayoutConstraint.activate([
			weekView.widthAnchor.constraint(equalTo: widthAnchor),
			weekView.heightAnchor.constraint(equalToConstant: headerHeight / 2),
			weekView.centerXAnchor.constraint(equalTo: centerXAnchor),
			weekView.topAnchor.constraint(equalTo: dateView.bottomAnchor)
		])
		// each weekday view is 1/7 of the width, chained leading-to-trailing
		var previousWeekdayView: UIView?
		for i in 0..<7 {
			let weekdayView = UIView()
			weekdayView.translatesAutoresizingMaskIntoConstraints = false
			weekView.addSubview(weekdayView)
			NSLayoutConstraint.activate([
				weekdayView.widthAnchor.constraint(equalTo: weekView.widthAnchor, multiplier: 1 / 7),
				weekdayView.heightAnchor.constraint(equalTo: weekView.heightAnchor),
				weekdayView.leadingAnchor.constraint(equalTo: previousWeekdayView?.trailingAnchor ?? weekView.leadingAnchor),
				weekdayView.bottomAnchor.constraint(equalTo: weekView.bottomAnchor)
			])
			previousWeekdayView = weekdayView
			// setup label for weekday view
			let textLabel = UILabel()
			textLabel.translatesAutoresizingMaskIntoConstraints = false
			textLabel.textAlignment = .center
			textLabel.text = Date.veryShortWeekdaySymbols[i]
			// dim text color for weekends (first and last columns)
			if i == 0 || i == 6 {
				textLabel.textColor = UIColor(white: 0, alpha: 0.5)
			}
			// add to weekday view
			weekdayView.addSubview(textLabel)
			// setup constraints
			NSLayoutConstraint.activate([
				textLabel.widthAnchor.constraint(equalTo: weekdayView.widthAnchor),
				textLabel.heightAnchor.constraint(equalTo: weekdayView.heightAnchor),
				textLabel.centerXAnchor.constraint(equalTo: weekdayView.centerXAnchor),
				textLabel.centerYAnchor.constraint(equalTo: weekdayView.centerYAnchor)
			])
		}
	}
	/// Lays the three pages side by side inside the scroll view.
	private func addPages() {
		// page 1 (previous section)
		page1.translatesAutoresizingMaskIntoConstraints = false
		scrollView.addSubview(page1)
		NSLayoutConstraint.activate([
			page1.widthAnchor.constraint(equalTo: scrollView.widthAnchor),
			page1.heightAnchor.constraint(equalTo: scrollView.heightAnchor),
			page1.topAnchor.constraint(equalTo: scrollView.topAnchor),
			page1.leadingAnchor.constraint(equalTo: scrollView.leadingAnchor),
		])
		// page 2 (current section)
		page2.translatesAutoresizingMaskIntoConstraints = false
		scrollView.addSubview(page2)
		NSLayoutConstraint.activate([
			page2.widthAnchor.constraint(equalTo: scrollView.widthAnchor),
			page2.heightAnchor.constraint(equalTo: scrollView.heightAnchor),
			page2.topAnchor.constraint(equalTo: scrollView.topAnchor),
			page2.leadingAnchor.constraint(equalTo: page1.trailingAnchor),
		])
		// page 3 (next section)
		page3.translatesAutoresizingMaskIntoConstraints = false
		scrollView.addSubview(page3)
		NSLayoutConstraint.activate([
			page3.widthAnchor.constraint(equalTo: scrollView.widthAnchor),
			page3.heightAnchor.constraint(equalTo: scrollView.heightAnchor),
			page3.topAnchor.constraint(equalTo: scrollView.topAnchor),
			page3.leadingAnchor.constraint(equalTo: page2.trailingAnchor),
		])
	}
	/// Re-seeds all three pages around `date` and notifies the center page's
	/// delegate of the (possibly automatic) selection.
	private func resetPageDates() {
		page1.setDate(toSectionBeforeDate: date)
		page2.setDate(date)
		page3.setDate(toSectionAfterDate: date)
		page2.delegate?.calendarPage(page2, didSelectDate: date, selectedAutomatically: selectedPageAutomatically, isReselecting: false)
		selectedPageAutomatically = true
		setDateViewLabelDate(page2.date)
	}
	/// Advances `date` by one section in `direction` (snapping to today when
	/// today falls inside the target section) and re-centers the content offset.
	private func prepareForPageChange(direction: JDirection) {
		let offset = direction == .backwards ? -1 : 1
		var date: Date
		if isWeekly {
			date = self.date.adding(week: offset)
			if date.weekRange.includes(date: Date()) {
				date = Date()
			} else {
				date = date.beginningOfWeek
			}
		} else {
			date = self.date.adding(month: offset)
			if date.hasSameMonth(asDate: Date()) {
				date = Date()
			} else {
				date = date.beginningOfMonth
			}
		}
		self.date = date
		updateContentOffsetForMovement(inDirection: direction)
	}
	/// Computes the incoming section's week count and tells the delegate the
	/// new height (one square row per week) before the page change lands.
	private func prepareHeightForPageChange(direction: JDirection, forceDate: Date?) {
		let numWeeks: Int
		if isWeekly {
			numWeeks = 1
		} else {
			let offset = direction == .backwards ? -1 : 1
			var date = forceDate ?? self.date.adding(month: offset)
			if date.hasSameMonth(asDate: Date()) {
				date = Date()
			} else {
				date = date.beginningOfMonth
			}
			numWeeks = date.numberOfWeeksInMonth
		}
		scrollView.contentSize = CGSize(width: scrollView.frame.size.width * (3), height: scrollView.frame.size.height)
		delegate?.calendar(self, willUpdateHeight: (frame.width / 7).rounded(.down) * CGFloat(numWeeks))
	}
	/// Scrolls to page 0/1/2. User interaction is disabled during animated
	/// moves and restored when the animation/selection completes.
	private func goToPage(_ page: Int, animated: Bool = false) {
		scrollView.isUserInteractionEnabled = !animated
		var offset = scrollView.contentOffset
		let x = scrollView.frame.size.width * CGFloat(page)
		if offset.x != x {
			offset.x = x
			scrollView.setContentOffset(offset, animated: animated)
		}
	}
	/// Maps a content offset to a page index (0, 1 or 2).
	private func pageAtOffset(_ offset: CGPoint) -> Int {
		return Int(offset.x / scrollView.frame.size.width)
	}
	/// Shifts the content offset one page so the (re-seeded) middle page
	/// stays under the user's finger after a swipe.
	private func updateContentOffsetForMovement(inDirection direction: JDirection) {
		switch direction {
		case .backwards:
			scrollView.contentOffset.x += scrollView.frame.size.width
		case .forwards:
			scrollView.contentOffset.x -= scrollView.frame.size.width
		default:
			break
		}
	}
	/// Selects `date`. If it lies in the current section, updates the center
	/// page in place; otherwise seeds a neighbor page and animates to it.
	func selectDate(_ date: Date) {
		selectingDate = date
		if (!isWeekly && date.hasSameMonth(asDate: self.date)) ||
			(isWeekly && date.contained(in: self.date.weekRange)) {
			page2.setDate(date)
		} else {
			let i = date < self.date ? 0 : 2
			let page = i == 0 ? page1 : page3
			page.setDate(date, forceUpdateLayout: true)
			goToPage(i, animated: true)
			scrollViewWillEndAtPage(i, forceDate: date)
		}
	}
	/// Renders `date` in the header; uses the "today" accent color and hides
	/// the Today button while the current section contains today.
	private func setDateViewLabelDate(_ date: Date) {
		dateViewLabel.text = date.string(format: dateFormat)
		if (!isWeekly && date.hasSameMonth(asDate: Date())) ||
			(isWeekly && date.contained(in: Date().weekRange)) {
			dateViewLabel.textColor = colorScheme.today
			todayButton.isHidden = true
		} else {
			dateViewLabel.textColor = colorScheme.text
			todayButton.isHidden = false
		}
	}
	// dim the button while pressed
	@objc private func todayButtonDown(_ sender: UIButton) {
		sender.alpha = 0.4
	}
	// restore the button when the touch leaves / cancels
	@objc private func todayButtonExit(_ sender: UIButton) {
		sender.alpha = 1
	}
	// jump the calendar back to today
	@objc private func todayButtonPressed(_ sender: UIButton) {
		sender.alpha = 1
		let today = Date()
		selectDate(today)
	}
}
extension JCalendar: UIScrollViewDelegate, JCalendarPageDelegate, JCalendarPageDataSource {
	/// Captures which page the drag will settle on. Page 1 (center) means no
	/// section change; otherwise pre-announce the incoming height.
	func scrollViewWillEndDragging(_ scrollView: UIScrollView, withVelocity velocity: CGPoint, targetContentOffset: UnsafeMutablePointer<CGPoint>) {
		scrollView.isUserInteractionEnabled = false
		targetPage = Int(targetContentOffset.pointee.x / scrollView.frame.size.width)
		if targetPage == 1 {
			targetPage = nil
			scrollView.isUserInteractionEnabled = true
		} else {
			scrollViewWillEndAtPage(targetPage!, forceDate: nil)
		}
	}
	/// Commits the pending page change once deceleration finishes.
	func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
		guard let targetPage = targetPage else { return }
		scrollViewDidEndAtPage(targetPage)
		scrollView.isUserInteractionEnabled = true
		self.targetPage = nil
	}
	/// Completes a programmatic selectDate(_:) animation: snap back to the
	/// center page, then commit the new date (which re-seeds the pages).
	func scrollViewDidEndScrollingAnimation(_ scrollView: UIScrollView) {
		guard let date = selectingDate else { return }
		goToPage(1)
		self.date = date
		selectingDate = nil
		scrollView.isUserInteractionEnabled = true
	}
	private func scrollViewDidEndAtPage(_ page: Int) {
		prepareForPageChange(direction: page < 1 ? .backwards : .forwards)
	}
	private func scrollViewWillEndAtPage(_ page: Int, forceDate: Date?) {
		prepareHeightForPageChange(direction: page < 1 ? .backwards : .forwards, forceDate: forceDate)
	}
	/// Forwards a page's selection to the calendar delegate, or pages over to
	/// the date's section when it falls outside the current one.
	// NOTE(review): this branches on hasSameMonth even when isWeekly == true;
	// for a weekly page whose week spans a month boundary the selectDate path
	// is taken for same-week dates in the adjacent month — confirm intended.
	func calendarPage(_ page: JCalendarPage, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool) {
		if self.date.hasSameMonth(asDate: date) {
			delegate?.calendar(self, didSelectDate: date, selectedAutomatically: selectedAutomatically, isReselecting: isReselecting)
		} else {
			selectedPageAutomatically = selectedAutomatically
			selectDate(date)
		}
	}
	/// Pass-through: pages ask the calendar's data source for marker colors.
	func calendarPage(_ calendarPage: JCalendarPage, markerColorForDate date: Date) -> UIColor? {
		return dataSource?.calendar(self, markerColorForDate: date)
	}
	/// Forwards the initial height of the center page exactly once.
	func calendarPage(_ page: JCalendarPage, willUpdateHeight height: CGFloat) {
		guard !heightPrepared else { return }
		if page == page2 {
			delegate?.calendar(self, willUpdateHeight: height)
			heightPrepared = true
		}
	}
}
<file_sep>//
// JManaged.swift
// JCore
//
// Created by <NAME> on 9/18/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import CoreData
// Class that simplifies the management of objects in the database, if they are desired to be customized.
// Class that simplifies the management of objects in the database, if they are desired to be customized.
class JManaged {
	
	/// Writes `value` into `object`'s primitive storage for `key`, wrapped in
	/// will/didChangeValue so KVO observers are notified of the update.
	class func set(_ value: Any?, forKey key: String, inObject object: NSManagedObject) {
		object.willChangeValue(forKey: key)
		object.setPrimitiveValue(value, forKey: key)
		object.didChangeValue(forKey: key)
	}
	
	/// Reads the primitive value for `key` from `object`, wrapped in
	/// will/didAccessValue so Core Data can fire faults before the read.
	class func get(_ key: String, inObject object: NSManagedObject) -> Any? {
		object.willAccessValue(forKey: key)
		defer { object.didAccessValue(forKey: key) }
		return object.primitiveValue(forKey: key)
	}
	
}
<file_sep>//
// SelectionViewController~Calendar.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
extension SelectionViewController: JCalendarDelegate {
	
	/// Builds the calendar (seeded with the current selection, if any), wires
	/// it to this controller, and pins it between the top of the table
	/// container and the button view.
	func layoutCalendarView() {
		let calendar = JCalendar(date: selected.object as? Date ?? Date())
		calendar.dateFormat = "MMM yyyy"
		calendar.delegate = self
		calendar.translatesAutoresizingMaskIntoConstraints = false
		tableContainer.insertSubview(calendar, belowSubview: buttonView)
		
		NSLayoutConstraint.activate([
			calendar.widthAnchor.constraint(equalTo: tableContainer.widthAnchor),
			calendar.centerXAnchor.constraint(equalTo: tableContainer.centerXAnchor),
			calendar.topAnchor.constraint(equalTo: tableContainer.topAnchor),
			calendar.bottomAnchor.constraint(equalTo: buttonView.topAnchor)
		])
	}
	
	/// Stores a user-tapped date and dismisses the selector; automatic
	/// (page-change) selections are ignored.
	func calendar(_ calendar: JCalendar, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool) {
		guard !selectedAutomatically else { return }
		selected.object = date
		dismiss(selected, cancelled: false)
	}
	
	/// Resizes the sheet when the calendar's week count changes; the very
	/// first resize (from the initial 150pt placeholder) is not animated.
	func calendar(_ calendar: JCalendar, willUpdateHeight height: CGFloat) {
		let newHeight = calendar.headerHeight + height + buttonView.height
		guard heightConstraint.constant != newHeight else { return }
		
		let shouldAnimate = heightConstraint.constant != 150
		
		self.view.layoutIfNeeded()
		heightConstraint.constant = newHeight
		
		if shouldAnimate {
			UIViewPropertyAnimator(duration: 0.3, curve: .linear) {
				self.view.layoutIfNeeded()
			}.startAnimation()
		} else {
			self.view.layoutIfNeeded()
		}
	}
	
}
<file_sep>//
// TaskDetailViewController.swift
// Prime Planner
//
// Created by <NAME> on 9/14/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import UIKit
/// Simple detail screen that displays a single piece of text handed over by
/// the presenting controller in a centered label.
class TaskDetailViewController: UIViewController {
	// label that mirrors detailTextPassedFromPreviousController
	private let testDetailLabel = UILabel()
	// this is project private by default, so any class in the project can access and edit it
	var detailTextPassedFromPreviousController = "Hello, World!" {
		// lets observe the setter and update our label text when the detail text is changed
		didSet {
			// update our label text
			testDetailLabel.text = detailTextPassedFromPreviousController
			// once the text is set, lets size the label to fit the text that its holding
			testDetailLabel.sizeToFit()
		}
	}
	override func viewDidLoad() {
		super.viewDidLoad()
		// set our background color, because it's transparent by default
		view.backgroundColor = .white
		// layout our test label
		layoutTestDetailLabel()
	}
	override func viewWillAppear(_ animated: Bool) {
		super.viewWillAppear(animated)
		// ensure that our nav bar is showing in this controller,
		// since it's being pushed from a controller where it's hidden
		navigationController?.setNavigationBarHidden(false, animated: true)
	}
	/// Centers the label in the view; its size is managed by the observer above.
	func layoutTestDetailLabel() {
		testDetailLabel.translatesAutoresizingMaskIntoConstraints = false
		testDetailLabel.font = UIFont.systemFont(ofSize: 60)
		testDetailLabel.textColor = .red
		view.addSubview(testDetailLabel)
		// we're only going to constrain the center anchors, because
		// the sizing is managed by the setter observer above
		NSLayoutConstraint.activate([
			testDetailLabel.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			testDetailLabel.centerYAnchor.constraint(equalTo: view.centerYAnchor)
		])
	}
}
<file_sep>//
// ViewControllerDashboard.swift
// Prime Planner
//
// Created by <NAME> on 10/4/18.
// Copyright © 2018 Poods. All rights reserved.
//
import UIKit
import Foundation
/// Dashboard screen: a weekly JCalendar strip on top and a grouped table of
/// tasks bucketed into "No Due Date" / "Today" / "This Week" / "Upcoming".
class ViewControllerDashboard: UIViewController {
	let tableView = UITableView(frame: .zero, style: .grouped)
	let calendarContainer = UIView()
	let calendar = JCalendar(date: Date(), headerHeight: 60, isWeekly: true)
	// adjusted when the calendar reports a height change
	var calendarHeightConstraint: NSLayoutConstraint!
	// one inner array per table section; taskHeaders holds the section titles
	var tasks = [[Task]]()
	var taskHeaders = [String]()
	override func viewDidLoad() {
		super.viewDidLoad()
		navigationController?.setNavigationBarHidden(true, animated: false)
		layoutCalendarView()
		layoutTableView()
	}
	override func viewWillAppear(_ animated: Bool) {
		super.viewWillAppear(animated)
		// reload the data each time the view appears
		loadData()
		tableView.reloadData()
	}
	/// Rebuilds the section arrays: today's tasks, the rest of this week,
	/// tasks with no due date, and everything else ("Upcoming"). Sections
	/// are mutually exclusive and empty sections are omitted.
	func loadData() {
		// clear out the previous data
		tasks.removeAll()
		taskHeaders.removeAll()
		// create ranges for day and week
		let dayRange = Date().dayRange
		let weekRange = Date().weekRange
		// fetch our tasks based on the created date ranges;
		// week excludes today's tasks, upcoming excludes all other buckets
		let todayTasks = jcore.tasks.match(range: dayRange).sort("dueDate", ascending: true).fetch()
		let weekTasks = jcore.tasks.match(range: weekRange).sort("dueDate", ascending: true).fetch().filter({ !todayTasks.contains($0) })
		let noDueDateTasks = jcore.tasks.filter("dueDate == nil").fetch()
		let upcomingTasks = jcore.tasks.fetch().filter({ !todayTasks.contains($0) && !weekTasks.contains($0) && !noDueDateTasks.contains($0) })
		// add the fetched tasks into the array
		if !noDueDateTasks.isEmpty {
			tasks.append(noDueDateTasks)
			taskHeaders.append("No Due Date")
		}
		if !todayTasks.isEmpty {
			tasks.append(todayTasks)
			taskHeaders.append("Today")
		}
		if !weekTasks.isEmpty {
			tasks.append(weekTasks)
			taskHeaders.append("This Week")
		}
		if !upcomingTasks.isEmpty {
			tasks.append(upcomingTasks)
			taskHeaders.append("Upcoming")
		}
	}
	/// Pins the table below the calendar container and adds a thin separator.
	func layoutTableView() {
		// set tableview delegates & row height
		tableView.delegate = self
		tableView.dataSource = self
		tableView.rowHeight = 70
		// we are manually setting constraints so we turn autoconstraints off
		tableView.translatesAutoresizingMaskIntoConstraints = false
		// register this tableview with the task cell class
		tableView.register(TaskCell.self, forCellReuseIdentifier: "TaskCell")
		// add the tableview to the main view of this view controller (self)
		view.addSubview(tableView)
		// constraint dimensions to match the view (fills the entire view)
		NSLayoutConstraint.activate([
			tableView.topAnchor.constraint(equalTo: calendarContainer.bottomAnchor),
			tableView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
			tableView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			tableView.widthAnchor.constraint(equalTo: view.widthAnchor)
		])
		// create a separator between calendar and table
		let sep = UIView()
		sep.translatesAutoresizingMaskIntoConstraints = false
		sep.backgroundColor = UIColor(white: 0.95, alpha: 1)
		view.addSubview(sep)
		NSLayoutConstraint.activate([
			sep.heightAnchor.constraint(equalToConstant: 2),
			sep.widthAnchor.constraint(equalTo: view.widthAnchor),
			sep.topAnchor.constraint(equalTo: tableView.topAnchor),
			sep.centerXAnchor.constraint(equalTo: view.centerXAnchor)
		])
	}
	/// Places the calendar inside a fixed-position container whose height
	/// constraint (initially 150pt) is driven by the calendar delegate.
	func layoutCalendarView() {
		// setup the calendar container
		calendarContainer.translatesAutoresizingMaskIntoConstraints = false
		view.addSubview(calendarContainer)
		// add container constraints
		calendarHeightConstraint = calendarContainer.heightAnchor.constraint(equalToConstant: 150)
		NSLayoutConstraint.activate([
			calendarContainer.widthAnchor.constraint(equalTo: view.widthAnchor),
			calendarContainer.topAnchor.constraint(equalTo: view.topAnchor, constant: UIApplication.shared.statusBarFrame.height + 10),
			calendarHeightConstraint,
			calendarContainer.centerXAnchor.constraint(equalTo: view.centerXAnchor)
		])
		// setup calendar
		calendar.delegate = self
		calendar.dataSource = self
		calendar.translatesAutoresizingMaskIntoConstraints = false
		calendarContainer.addSubview(calendar)
		// add calendar constraints (fill the container)
		NSLayoutConstraint.activate([
			calendar.widthAnchor.constraint(equalTo: calendarContainer.widthAnchor),
			calendar.heightAnchor.constraint(equalTo: calendarContainer.heightAnchor),
			calendar.centerXAnchor.constraint(equalTo: calendarContainer.centerXAnchor),
			calendar.centerYAnchor.constraint(equalTo: calendarContainer.centerYAnchor)
		])
	}
}
extension ViewControllerDashboard: UITableViewDelegate, UITableViewDataSource {
	// one table section per non-empty task bucket
	func numberOfSections(in tableView: UITableView) -> Int {
		return tasks.count
	}
	func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
		return tasks[section].count
	}
	func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
		let cell = tableView.dequeueReusableCell(withIdentifier: "TaskCell", for: indexPath) as! TaskCell
		cell.setTask(task: tasks[indexPath.section][indexPath.row])
		return cell
	}
	// section titles mirror the buckets built in loadData()
	func tableView(_ tableView: UITableView, titleForHeaderInSection section: Int) -> String? {
		guard taskHeaders.count > section else { return nil }
		return taskHeaders[section]
	}
	/// Pushes the edit screen for the tapped task.
	func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
		// let's deselect this row, so that it doesn't stay selected when we come back
		tableView.deselectRow(at: indexPath, animated: true)
		// guard function against empty data
		guard tasks[indexPath.section].count != 0 else { return }
		// get the task from the data array, using the row that was tapped
		let task = tasks[indexPath.section][indexPath.row]
		// create our detail controller
		let taskDetailViewController = TaskEditViewController()
		taskDetailViewController.task = task
		// push vc onto the nav stack
		navigationController?.pushViewController(taskDetailViewController, animated: true)
	}
}
extension ViewControllerDashboard: JCalendarDelegate, JCalendarDataSource {
	/// User tapped a calendar date; reselecting the same date is ignored.
	func calendar(_ calendar: JCalendar, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool) {
		guard !isReselecting else { return }
		// scroll to row
	}
	/// Marks a calendar cell with the app accent color when at least one
	/// task is due that day.
	func calendar(_ calendar: JCalendar, markerColorForDate date: Date) -> UIColor? {
		var color: UIColor?
		// if a task exists in this date, mark the cell
		if jcore.tasks.match(range: date.dayRange).fetchOrNil() != nil {
			color = AppTheme.color()
		}
		return color
	}
	/// Resizes the calendar container; the first resize (from the 150pt
	/// placeholder) is applied without animation.
	func calendar(_ calendar: JCalendar, willUpdateHeight height: CGFloat) {
		let constant = calendar.headerHeight + height
		guard calendarHeightConstraint.constant != constant else { return }
		let animate = calendarHeightConstraint.constant != 150
		view.layoutIfNeeded()
		calendarHeightConstraint.constant = constant
		if animate {
			let anim = UIViewPropertyAnimator(duration: 0.3, curve: .linear) {
				self.view.layoutIfNeeded()
			}
			anim.startAnimation()
		} else {
			view.layoutIfNeeded()
		}
	}
}
<file_sep>//
// CalendarDataTests.swift
// Prime PlannerTests
//
// Created by <NAME> on 10/26/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import XCTest
@testable import Prime_Planner
class CalendarDataTests: XCTestCase {
	
	/// Verifies a task due on a specific day is returned by a day-range query.
	func testTaskInDateRangeRetrieval() {
		// fixed due date for the fixture
		let date = Date(year: 2018, month: 12, day: 25)
		
		// insert a task due on that date
		let task = insertTask(dueDate: date)
		
		// query the store over that day's range (start of day ... end of day)
		let dateRange = date.dayRange
		XCTAssertNotNil(jcore.tasks.match(range: dateRange).fetchOrNil())
		
		// clean up the fixture
		removeTask(task)
	}
	
	/// Builds and saves a task with the given due date, then asserts it can
	/// be fetched back by id.
	func insertTask(dueDate: Date) -> Task {
		// build a sample task
		let task = Task(name: "<NAME>!")
		task.creationDate = Date()
		task.dueDate = dueDate
		task.note = "This is a note."
		task.priority = .low
		
		// persist it
		jcore.save()
		
		// re-fetch by id and confirm it exists
		let fetched = jcore.tasks.match(id: task.id).fetchFirst()
		XCTAssertNotNil(fetched)
		
		return fetched!
	}
	
	/// Deletes the given task and asserts it no longer exists in the database.
	func removeTask(_ task: Task) {
		let id = task.id
		
		// confirm the task is currently stored
		let stored = jcore.tasks.match(id: id).fetchFirst()
		XCTAssertNotNil(stored)
		
		// delete it
		jcore.remove(stored)
		
		// a fetch by the same id should now come back empty
		XCTAssertNil(jcore.tasks.match(id: id).fetchFirst())
	}
	
}
<file_sep>//
// TaskEditViewController.swift
// Prime Planner
//
// Created by <NAME> on 9/19/18.
// Copyright © 2018 Poods. All rights reserved.
//
/*
create name field
add date calendar: for creation and due
*/
import Foundation
import UIKit
/// Create/edit screen for a single task: a name text field on top, a table of
/// editable fields (due date / priority / note), and a Cancel/Done button bar.
/// If `task` is nil the controller is in creation mode.
class TaskEditViewController: UIViewController {
	// keys for the editable fields; rawValue doubles as the row title
	private enum TaskFieldKey: String {
		case date = "Due Date"
		case priority = "Priority"
		case note = "Note"
	}
	private let tableView = UITableView(frame: .zero, style: .grouped)
	private let nameTextField = UITextField()
	// the task being edited; nil means creation mode
	var task: Task?
	// working copies of the editable values, committed on Done
	private var dueDate: Date?
	private var priority = TaskPriority.none
	private var note = ""
	// display strings for each field row, keyed by field
	private var taskFields = [TaskFieldKey: String]()
	// fixed row order for the table
	private var taskFieldKeys: [TaskFieldKey] = [ .date, .priority, .note ]
	override func viewDidLoad() {
		super.viewDidLoad()
		view.backgroundColor = .white
		loadData()
		loadTaskFields()
		layoutTextField()
		layoutTableView()
		layoutDismissButtons()
		// hide our navigation bar for this controller
		// we will use our own navigation to get back
		navigationController?.setNavigationBarHidden(true, animated: false)
	}
	/// Copies the task's values into the working fields (edit mode only).
	func loadData() {
		// check if task exists, else return (this means we're in creation mode)
		guard let task = task else { return }
		// load our data from the task (this means we're in view / edit mode)
		nameTextField.text = task.name
		dueDate = task.dueDate
		priority = task.priority
		note = task.note
	}
	/// Builds the display strings for the field rows ("None" for empty values).
	func loadTaskFields() {
		taskFields[.date] = dueDate?.string ?? "None"
		taskFields[.priority] = priority.string
		taskFields[.note] = note != "" ? note : "None"
	}
	/// Lays out the name field at the top; focuses it in creation mode.
	func layoutTextField() {
		// setup the name text field
		nameTextField.translatesAutoresizingMaskIntoConstraints = false
		nameTextField.placeholder = "Task Name"
		nameTextField.textAlignment = .center
		nameTextField.font = UIFont.systemFont(ofSize: 24)
		nameTextField.returnKeyType = .done
		nameTextField.delegate = self
		view.addSubview(nameTextField)
		// setup nameTF constraints (constrained to the top of the view)
		NSLayoutConstraint.activate([
			nameTextField.widthAnchor.constraint(equalTo: view.widthAnchor),
			nameTextField.heightAnchor.constraint(equalToConstant: 80),
			nameTextField.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			nameTextField.topAnchor.constraint(equalTo: view.topAnchor, constant: UIApplication.shared.statusBarFrame.height)
		])
		// if in creation mode, focus nameTextField
		if task == nil {
			nameTextField.becomeFirstResponder()
		}
	}
	/// Builds the rounded Cancel/Done button bar pinned to the bottom.
	func layoutDismissButtons() {
		let buttonsViewPadding: CGFloat = 15
		// create the button container view
		let buttonsView = UIView()
		buttonsView.translatesAutoresizingMaskIntoConstraints = false
		buttonsView.layer.cornerRadius = 10
		buttonsView.layer.masksToBounds = true
		buttonsView.layer.borderColor = UIColor(white: 0, alpha: 0.2).cgColor
		buttonsView.layer.borderWidth = 2
		view.addSubview(buttonsView)
		// setup the constraints for the container
		NSLayoutConstraint.activate([
			buttonsView.widthAnchor.constraint(equalTo: view.widthAnchor, constant: -(buttonsViewPadding * 2)),
			buttonsView.heightAnchor.constraint(equalToConstant: 50),
			buttonsView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			buttonsView.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -buttonsViewPadding)
		])
		// setup the cancel button (left half)
		let cancelButton = UIButton()
		cancelButton.translatesAutoresizingMaskIntoConstraints = false
		cancelButton.setTitle("Cancel", for: .normal)
		cancelButton.setTitleColor(.black, for: .normal)
		cancelButton.setTitleColor(UIColor(white: 0, alpha: 0.5), for: .highlighted)
		cancelButton.backgroundColor = UIColor(white: 0.93, alpha: 1)
		cancelButton.addTarget(self, action: #selector(cancelButtonPressed), for: .touchUpInside)
		buttonsView.addSubview(cancelButton)
		// setup the constraints for the cancel button
		NSLayoutConstraint.activate([
			cancelButton.widthAnchor.constraint(equalTo: buttonsView.widthAnchor, multiplier: 0.5),
			cancelButton.heightAnchor.constraint(equalTo: buttonsView.heightAnchor),
			cancelButton.centerYAnchor.constraint(equalTo: buttonsView.centerYAnchor),
			cancelButton.leadingAnchor.constraint(equalTo: buttonsView.leadingAnchor)
		])
		// setup the done button (right half)
		let doneButton = UIButton()
		doneButton.translatesAutoresizingMaskIntoConstraints = false
		doneButton.setTitle("Done", for: .normal)
		doneButton.setTitleColor(.black, for: .normal)
		doneButton.setTitleColor(UIColor(white: 0, alpha: 0.5), for: .highlighted)
		doneButton.backgroundColor = UIColor(white: 0.88, alpha: 1)
		doneButton.addTarget(self, action: #selector(doneButtonPressed), for: .touchUpInside)
		buttonsView.addSubview(doneButton)
		// setup the constraints for the done button
		NSLayoutConstraint.activate([
			doneButton.widthAnchor.constraint(equalTo: buttonsView.widthAnchor, multiplier: 0.5),
			doneButton.heightAnchor.constraint(equalTo: buttonsView.heightAnchor),
			doneButton.centerYAnchor.constraint(equalTo: buttonsView.centerYAnchor),
			doneButton.trailingAnchor.constraint(equalTo: buttonsView.trailingAnchor)
		])
	}
	/// Lays out the field table below the name field, with a separator line.
	func layoutTableView() {
		// setup our tableview
		tableView.translatesAutoresizingMaskIntoConstraints = false
		tableView.delegate = self
		tableView.dataSource = self
		tableView.rowHeight = 60
		tableView.keyboardDismissMode = .onDrag
		view.addSubview(tableView)
		// anchor tableview to the bounds of the main view
		NSLayoutConstraint.activate([
			tableView.widthAnchor.constraint(equalTo: view.widthAnchor),
			tableView.heightAnchor.constraint(equalTo: view.heightAnchor),
			tableView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			tableView.topAnchor.constraint(equalTo: nameTextField.bottomAnchor)
		])
		// create a seperator view that will seperate the name field and the tableview
		let sep = UIView()
		sep.translatesAutoresizingMaskIntoConstraints = false
		sep.backgroundColor = UIColor(white: 0.4, alpha: 1)
		view.addSubview(sep)
		NSLayoutConstraint.activate([
			sep.widthAnchor.constraint(equalTo: view.widthAnchor),
			sep.heightAnchor.constraint(equalToConstant: 2),
			sep.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			sep.topAnchor.constraint(equalTo: tableView.topAnchor)
		])
	}
}
// tableView delegate and datasource
extension TaskEditViewController: UITableViewDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return taskFieldKeys.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
var cell: UITableViewCell! = tableView.dequeueReusableCell(withIdentifier: "EditCell")
if cell == nil {
cell = UITableViewCell(style: .value1, reuseIdentifier: "EditCell")
}
let key = taskFieldKeys[indexPath.row]
cell.textLabel?.text = key.rawValue
cell.detailTextLabel?.text = taskFields[key]
return cell
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
// get the cell at the selected row
guard let cell = tableView.cellForRow(at: indexPath) else { return }
// deselect the current row, so that it doesn't stay highlighted
tableView.deselectRow(at: indexPath, animated: true)
// this will dismiss the keyboard if it is currently showing
view.endEditing(true)
// get the key for the taskField that we are selecting
let key = taskFieldKeys[indexPath.row]
// go to the specified selection view, based on the key
switch key {
case .date:
let date: Date
if let detail = cell.detailTextLabel?.text {
date = Date(string: detail)
} else {
date = dueDate ?? Date()
}
SelectionViewController.present(self, type: .calendar, object: date) { item, cancel in
guard !cancel else { return }
let date = item?.object as? Date
self.dueDate = date
self.taskFields[key] = date?.string ?? "None"
self.tableView.reloadData()
}
case .priority:
SelectionViewController.present(self, type: .priority, object: priority) { item, cancel in
guard !cancel, let priority = item?.object as? TaskPriority else { return }
self.priority = priority
self.taskFields[key] = priority.string
self.tableView.reloadData()
}
case .note:
SelectionViewController.present(self, type: .note, object: note) { item, cancel in
guard !cancel, let note = item?.object as? String else { return }
self.note = note
self.taskFields[key] = note
self.tableView.reloadData()
}
}
}
// dismiss the controller
// if task doesn't exist, dismiss as a modal
// if task exists, pop the controller from the navigation controller
@objc func dismissController() {
if task == nil {
dismiss(animated: true, completion: nil)
} else {
navigationController?.popViewController(animated: true)
}
}
}
// cancel, done button actions (must objc tag for button actions)
// cancel, done button actions (must objc tag for button actions)
@objc extension TaskEditViewController {
	// discard any edits and close the editor
	func cancelButtonPressed() {
		dismissController()
	}
	// validate, persist, and close the editor
	func doneButtonPressed() {
		// guard against empty name field
		// if field is empty, focus it and do not dismiss
		guard let name = nameTextField.text, name != "" else {
			nameTextField.becomeFirstResponder()
			return
		}
		// get the original task, if it exists
		// if not, create a new task (the initializer inserts it into the context)
		let task = self.task ?? Task(name: name)
		// set all task values
		task.name = name
		task.dueDate = dueDate
		task.priority = priority
		task.note = note
		// save the database; the task will be inserted/updated automatically
		jcore.save()
		// dismiss the edit controller
		dismissController()
	}
}
// Text-field delegate conformance for the name field.
extension TaskEditViewController: UITextFieldDelegate {
	/// Hides the keyboard when the user taps the Return key.
	func textFieldShouldReturn(_ textField: UITextField) -> Bool {
		_ = textField.resignFirstResponder()
		return true
	}
}
<file_sep>//
// CategorySelectionView.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// A reusable modal picker presented over a blurred background.
// Depending on `selectionType` it hosts a table of options (.priority),
// a free-form note editor (.note), or a calendar (.calendar).
class SelectionViewController: UIViewController, UIViewControllerTransitioningDelegate {
	// drives the custom scale/fade present and dismiss transitions
	fileprivate let animationController = SelectionAnimationController()
	let tableView = UITableView(frame: .zero, style: .plain)
	// editor shown when selectionType == .note
	let noteTextView = UITextView()
	// which kind of picker this controller displays
	var selectionType: SelectionType
	// option data backing the table rows
	var items: SelectionItems
	var blur = UIVisualEffectView(effect: UIBlurEffect(style: .prominent))
	// horizontal inset of the rounded card from the screen edges
	var padding: CGFloat = 30
	// rounded card holding the table / note view plus the button row
	let tableContainer = UIView()
	var buttonView: ButtonView!
	// invoked on dismissal with the chosen item (nil when removed or cancelled)
	var completion: ((_ item: SelectionItem?, _ cancelled: Bool) -> ())?
	// one-shot configuration block; run once in viewWillAppear, then cleared
	var initHandler: (() -> ())?
	var heightConstraint: NSLayoutConstraint!
	// the item that was selected when the picker was presented
	let selected: SelectionItem
	// when true, the table shows an extra leading "no item" section
	var showsNoItemSection: Bool = false {
		didSet {
			tableView.reloadData()
		}
	}
	override var prefersStatusBarHidden: Bool {
		return true
	}
	// NOTE(review): this collection view is never installed or referenced in
	// this file — presumably leftover; verify before removing
	let collectionView = UICollectionView(frame: .zero, collectionViewLayout: UICollectionViewFlowLayout())
	// use the custom animation controller for presentation
	func animationController(forPresented presented: UIViewController, presenting: UIViewController, source: UIViewController) -> UIViewControllerAnimatedTransitioning? {
		animationController.presenting = true
		animationController.scalingView = tableContainer
		return animationController
	}
	// use the custom animation controller for dismissal
	func animationController(forDismissed dismissed: UIViewController) -> UIViewControllerAnimatedTransitioning? {
		animationController.presenting = false
		animationController.scalingView = tableContainer
		return animationController
	}
	override func viewWillAppear(_ animated: Bool) {
		super.viewWillAppear(animated)
		// run the deferred configuration exactly once
		guard initHandler != nil else { return }
		initHandler?()
		initHandler = nil
		// the note editor should start with the keyboard up
		if selectionType == .note {
			noteTextView.becomeFirstResponder()
		}
	}
	override func viewWillDisappear(_ animated: Bool) {
		super.viewWillDisappear(animated)
		// drop the keyboard before the note editor goes away
		if selectionType == .note {
			view.endEditing(true)
		}
	}
	// Populates the UI from `selected` and scrolls to the current choice.
	func loadData() {
		var index: Int?
		switch selectionType {
		case .priority:
			let types = items.priorityTypes
			if let selected = selected.object as? TaskPriority, let i = types.firstIndex(of: selected) {
				index = i
			}
		case .note:
			noteTextView.text = selected.object as? String
		case .calendar:
			break;
		}
		if let i = index {
			self.tableView.scrollToRow(at: IndexPath(row: i, section: self.showsNoItemSection ? 1 : 0), at: .middle, animated: false)
		}
	}
	init(type: SelectionType, item: SelectionItem) {
		selectionType = type
		items = SelectionItems(type: selectionType)
		selected = item
		super.init(nibName: nil, bundle: nil)
		layout()
	}
	required init?(coder aDecoder: NSCoder) {
		fatalError("init(coder:) has not been implemented")
	}
	// Builds the whole view hierarchy: blur backdrop, rounded container
	// (sized per selection type), button row, and the content view.
	func layout() {
		// setup blur
		blur.translatesAutoresizingMaskIntoConstraints = false
		view.addSubview(blur)
		NSLayoutConstraint.activate([
			blur.widthAnchor.constraint(equalTo: view.widthAnchor),
			blur.heightAnchor.constraint(equalTo: view.heightAnchor),
			blur.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			blur.centerYAnchor.constraint(equalTo: view.centerYAnchor)
		])
		// setup container
		tableContainer.translatesAutoresizingMaskIntoConstraints = false
		tableContainer.clipsToBounds = true
		tableContainer.setRadius(20.0)
		tableContainer.setBorder(1, color: UIColor(white: 0, alpha: 0.6))
		// slightly shrunk; the transition animates it back to full scale
		tableContainer.setScale(0.9)
		blur.contentView.addSubview(tableContainer)
		let widthConstraint = tableContainer.widthAnchor.constraint(equalTo: view.widthAnchor, constant: -(padding * 2))
		let buttonHeight: CGFloat = 50
		let yConstraint: NSLayoutConstraint
		var heightConstant = safeInsets.top + safeInsets.bottom
		let rowHeight: CGFloat = 60
		// size/position of the card depends on the picker type
		switch selectionType {
		case .note:
			// fill the space above the keyboard, inset by `pad` top and bottom
			let pad: CGFloat = 60
			heightConstant -= safeInsets.bottom
			heightConstant += KeyboardService.height
			heightConstant += pad * 2
			yConstraint = tableContainer.topAnchor.constraint(equalTo: view.topAnchor, constant: safeInsets.top + pad)
			heightConstraint = tableContainer.heightAnchor.constraint(equalTo: view.heightAnchor, constant: -heightConstant)
		case .priority:
			// exactly tall enough for every option row plus the button row
			let height: CGFloat = rowHeight * CGFloat(items.count) + buttonHeight
			yConstraint = tableContainer.centerYAnchor.constraint(equalTo: view.centerYAnchor)
			heightConstraint = tableContainer.heightAnchor.constraint(equalToConstant: height)
		case .calendar:
			yConstraint = tableContainer.centerYAnchor.constraint(equalTo: view.centerYAnchor)
			heightConstraint = tableContainer.heightAnchor.constraint(equalToConstant: 150)
		}
		NSLayoutConstraint.activate([
			widthConstraint,
			heightConstraint,
			tableContainer.centerXAnchor.constraint(equalTo: view.centerXAnchor),
			yConstraint
		])
		// setup buttons
		let sep = UIView()
		sep.translatesAutoresizingMaskIntoConstraints = false
		sep.backgroundColor = UIColor(white: 0, alpha: 0.6)
		// every picker has Cancel; note adds Done, calendar adds Remove
		var buttons = [ButtonViewButton]()
		buttons.append(ButtonViewButton(title: "Cancel"))
		if selectionType == .note {
			buttons.append(ButtonViewButton(title: "Done"))
		} else if selectionType == .calendar {
			let button = ButtonViewButton(title: "Remove")
			button.textColor = .red
			buttons.append(button)
		}
		// dispatch button taps by title
		buttonView = ButtonView(buttons: buttons) { button in
			if button.currentTitle == "Cancel" {
				self.dismiss(nil, cancelled: true)
			} else if button.currentTitle == "Done" {
				self.doneButtonPressed()
			} else if button.currentTitle == "Remove" {
				// nil item + not cancelled signals "clear the value"
				self.dismiss(nil, cancelled: false)
			}
		}
		buttonView.height = buttonHeight
		buttonView.buttonColor = UIColor(white: 0.96, alpha: 1)
		tableContainer.addSubview(buttonView)
		tableContainer.addSubview(sep)
		NSLayoutConstraint.activate([
			sep.widthAnchor.constraint(equalTo: tableContainer.widthAnchor),
			sep.heightAnchor.constraint(equalToConstant: 1),
			sep.centerXAnchor.constraint(equalTo: tableContainer.centerXAnchor),
			sep.bottomAnchor.constraint(equalTo: buttonView.topAnchor)
		])
		// setup tableview
		tableView.translatesAutoresizingMaskIntoConstraints = false
		tableView.rowHeight = rowHeight
		tableView.delegate = self
		tableView.dataSource = self
		tableView.backgroundColor = .white
		tableView.separatorColor = UIColor(white: 0, alpha: 0.4)
		tableView.delaysContentTouches = false
		tableView.keyboardDismissMode = .onDrag
		tableView.reloadData()
		// also disable touch delays on the table's internal scroll view
		for view in tableView.subviews {
			if let scroll = view as? UIScrollView {
				scroll.delaysContentTouches = false
				break
			}
		}
		tableContainer.insertSubview(tableView, belowSubview: buttonView)
		NSLayoutConstraint.activate([
			tableView.widthAnchor.constraint(equalTo: tableContainer.widthAnchor),
			tableView.topAnchor.constraint(equalTo: tableContainer.topAnchor),
			tableView.bottomAnchor.constraint(equalTo: buttonView.topAnchor),
			tableView.centerXAnchor.constraint(equalTo: tableContainer.centerXAnchor)
		])
		// type-specific content
		switch selectionType {
		case .note:
			layoutNoteTextView()
		case .priority:
			tableView.isScrollEnabled = false
		case .calendar:
			layoutCalendarView()
		}
	}
	// Installs the note editor on top of the table area.
	func layoutNoteTextView() {
		noteTextView.translatesAutoresizingMaskIntoConstraints = false
		noteTextView.backgroundColor = .white
		noteTextView.delegate = self
		noteTextView.textColor = .black
		noteTextView.autocapitalizationType = .sentences
		noteTextView.font = UIFont.systemFont(ofSize: 22)
		noteTextView.textContainerInset = UIEdgeInsets(top: 15, left: 10, bottom: 15, right: 10)
		noteTextView.tintColor = .black
		tableContainer.insertSubview(noteTextView, belowSubview: buttonView)
		NSLayoutConstraint.activate([
			noteTextView.widthAnchor.constraint(equalTo: tableContainer.widthAnchor),
			noteTextView.centerXAnchor.constraint(equalTo: tableContainer.centerXAnchor),
			noteTextView.topAnchor.constraint(equalTo: tableContainer.topAnchor),
			noteTextView.bottomAnchor.constraint(equalTo: buttonView.topAnchor)
		])
	}
	// Done only applies to the note editor: returns the trimmed note, or a
	// nil item when the text is effectively empty.
	func doneButtonPressed() {
		switch selectionType {
		case .note:
			var item: SelectionItem?
			if let note = noteTextView.text, note.replacingOccurrences(of: " ", with: "") != "" {
				item = SelectionItem()
				item?.object = note
			}
			dismiss(item, cancelled: false)
		default:
			break
		}
	}
}
// Presentation / Dismiss
extension SelectionViewController {
func dismiss(_ item: SelectionItem?, cancelled: Bool) {
completion?(item, cancelled)
dismiss(animated: true, completion: nil)
}
class func present(_ parent: UIViewController, type: SelectionType, item: SelectionItem, showsNoItemSection: Bool, completion: ((_ item: SelectionItem?, _ cancelled: Bool) -> ())? = nil) {
let vc = SelectionViewController(type: type, item: item)
vc.transitioningDelegate = vc
vc.modalPresentationStyle = .custom
vc.initHandler = {
vc.completion = completion
vc.showsNoItemSection = showsNoItemSection
vc.loadData()
}
parent.present(vc, animated: true, completion: nil)
DispatchQueue.main.async {}
}
class func present(_ parent: UIViewController, type: SelectionType, object: Any?, completion: ((_ item: SelectionItem?, _ cancelled: Bool) -> ())? = nil) {
let item = SelectionItem()
item.object = object
present(parent, type: type, item: item, showsNoItemSection: false, completion: completion)
}
}
// UITextView delegates
extension SelectionViewController: UITextViewDelegate, UITextFieldDelegate {
func textViewShouldEndEditing(_ textView: UITextView) -> Bool {
return true
}
func textFieldShouldEndEditing(_ textField: UITextField) -> Bool {
return true
}
func textFieldShouldBeginEditing(_ textField: UITextField) -> Bool {
return true
}
func textFieldDidEndEditing(_ textField: UITextField) {
view.endEditing(true)
}
}
<file_sep>//
// Task.swift
// Prime Planner
//
// Created by <NAME> on 9/11/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import CoreData
import UIKit
/// Priority levels a task can carry. The raw Int16 value is what gets
/// persisted on the Task managed object.
enum TaskPriority: Int16 {
	case none, low, medium, high
	/// Human-readable name shown in the priority picker.
	var string: String {
		switch self {
		case .none: return "None"
		case .low: return "Low"
		case .medium: return "Medium"
		case .high: return "High"
		}
	}
	/// Exclamation-mark badge displayed beside a task.
	var symbol: String {
		switch self {
		case .none: return ""
		case .low: return "!"
		case .medium: return "!!"
		case .high: return "!!!"
		}
	}
	/// Tint color used when rendering the symbol.
	var color: UIColor {
		switch self {
		case .none: return .black
		case .low: return .purple
		case .medium: return .orange
		case .high: return .red
		}
	}
}
// we are going to manually manage the Task object, thus requiring it to be manually created
class Task: NSManagedObject {
// @NSManaged means this Task object is managed by the core database
@NSManaged var id: UUID
@NSManaged var name: String
@NSManaged var creationDate: Date
@NSManaged var dueDate: Date?
@NSManaged var note: String
@NSManaged var isChecked: Bool
// we disabled automatic management here, because we want
// to manage this specific property ourselves
var priority: TaskPriority {
set {
JManaged.set(newValue.rawValue, forKey: "priority", inObject: self)
}
get {
let value = JManaged.get("priority", inObject: self) as! Int16
return TaskPriority(rawValue: value)!
}
}
// here we create an convenience initializer that will insert a new Task with the given name into the database
convenience init(name: String) {
self.init(context: jcore.context)
self.id = UUID()
self.name = name
self.creationDate = Date()
self.priority = .none
self.note = ""
self.isChecked = false
}
}
<file_sep>//
// Date-Convenience.swift
// Prime Planner
//
// Created by <NAME> on 9/14/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
/// Two ranges are considered equal when they share the same identity.
func ==(lhs: DateRange, rhs: DateRange) -> Bool {
	return lhs.id == rhs.id
}
/// Ranges order by their start dates.
func <(lhs: DateRange, rhs: DateRange) -> Bool {
	return rhs.start > lhs.start
}
func >(lhs: DateRange, rhs: DateRange) -> Bool {
	return rhs.start < lhs.start
}
// a class that will hold a start date and an end date
class DateRange: Hashable, Equatable {
var id = UUID()
var start: Date
var end: Date
var string: String {
return start.string
}
var hashValue: Int {
get {
return id.hashValue
}
}
init(start: Date, end: Date) {
if start > end {
self.start = end
self.end = start
} else {
self.start = start
self.end = end
}
}
init() {
self.start = Date()
self.end = Date()
}
func includes(date: Date) -> Bool {
start = start.beginningOfDay
end = end.endOfDay
return date.contained(in: self)
}
}
// here we extend the Date class
extension Date {
// declare a getter that retrieves the current calendar
private var calendar: Calendar! {
return Calendar.current
}
//MARK: component getters
static var veryShortWeekdaySymbols: [String] {
return Calendar.current.veryShortStandaloneWeekdaySymbols
}
var components: DateComponents {
return calendar.dateComponents([.year, .month, .day, .hour, .minute, .second], from: self)
}
var year: Int {
return calendar.dateComponents([.year], from: self).year!
}
var month: Int {
return calendar.dateComponents([.month], from: self).month!
}
var day: Int {
return calendar.dateComponents([.day], from: self).day!
}
var dayOfYear: Int {
return calendar.ordinality(of: .day, in: .year, for: self)!
}
var weekday: Int {
return calendar.ordinality(of: .weekday, in: .weekOfMonth, for: self)!
}
var weekOfYear: Int {
return calendar.component(.weekOfYear, from: self)
}
var hour: Int {
return calendar.dateComponents([.hour], from: self).hour!
}
var minute: Int {
return calendar.dateComponents([.minute], from: self).minute!
}
var second: Int {
return calendar.component(.second, from: self)
}
var nanosecond: Int {
return calendar.component(.nanosecond, from: self)
}
var amPM: String {
return hour <= 11 ? "AM" : "PM"
}
var numberOfWeeksInMonth: Int {
let range = calendar.range(of: .weekOfMonth, in: .month, for: self)!
return range.upperBound - range.lowerBound
}
// MARK: current dates
static var currentYear: Int {
return Date().year
}
static var currentMonth: Int {
return Date().month
}
static var currentDay: Int {
return Date().day
}
static var todayString: String {
return Date().string(format: "EEEE, MMM d")
}
var flat: Date {
return beginningOfDay
}
var beginningOfDay: Date {
var date = self
date.set(hour: 0)
date.set(minute: 0)
date.set(second: 0)
return date
}
var endOfDay: Date {
var components = calendar.dateComponents([.day, .month, .year], from: self)
components.day! += 1
components.hour = 0
components.minute = 0
components.second = -1
return calendar.date(from: components)!
}
var beginningOfWeek: Date {
let components = calendar.dateComponents([.yearForWeekOfYear, .weekOfYear], from: self)
return calendar.date(from: components)!
}
var endOfWeek: Date {
var startOfWeek = Date()
var interval: TimeInterval = 0
_ = calendar.dateInterval(of: .weekOfYear, start: &startOfWeek, interval: &interval, for: self)
var components = calendar.dateComponents([.day, .month, .year], from: startOfWeek)
components.day! += 7
components.hour = 0
components.minute = 0
components.second = -1
return calendar.date(from: components)!
}
var beginningOfMonth: Date {
var components = calendar.dateComponents([.day, .month, .year], from: self)
components.day = 1
components.hour = 0
components.minute = 0
components.second = 0
return calendar.date(from: components)!
}
var endOfMonth: Date {
var components = calendar.dateComponents([.day, .month, .year], from: self)
components.month! += 1
components.day = 1
components.hour = 0
components.minute = 0
components.second = -1
return calendar.date(from: components)!
}
var beginningOfYear: Date {
var components = calendar.dateComponents([.day, .month, .year], from: self)
components.month = 1
components.day = 1
components.hour = 0
components.minute = 0
components.second = 0
return calendar.date(from: components)!
}
var endOfYear: Date {
var components = calendar.dateComponents([.day, .month, .year], from: self)
components.year! += 1
components.month = 1
components.day = 0
components.hour = 0
components.minute = 0
components.second = -1
return calendar.date(from: components)!
}
//MARK: date manipulation
func adding(year: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.year = year
return calendar.date(byAdding: offsetComponents, to: self)!
}
func adding(month: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.month = month
return calendar.date(byAdding: offsetComponents, to: self)!
}
func adding(week: Int) -> Date {
return adding(day: week * 7)
}
func adding(day: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.day = day
return calendar.date(byAdding: offsetComponents, to: self)!
}
func adding(hour: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.hour = hour
return calendar.date(byAdding: offsetComponents, to: self)!
}
func adding(minute: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.minute = minute
return calendar.date(byAdding: offsetComponents, to: self)!
}
func adding(second: Int) -> Date {
var offsetComponents = DateComponents()
offsetComponents.second = second
return calendar.date(byAdding: offsetComponents, to: self)!
}
mutating func set(year: Int) {
var components = self.components
components.year = year
self = calendar.date(from: components)!
}
mutating func set(month: Int) {
var components = self.components
components.month = month
self = calendar.date(from: components)!
}
mutating func set(day: Int) {
var components = self.components
components.day = day
self = calendar.date(from: components)!
}
mutating func set(hour: Int) {
var components = self.components
components.hour = hour
self = calendar.date(from: components)!
}
mutating func set(minute: Int) {
var components = self.components
components.minute = minute
self = calendar.date(from: components)!
}
mutating func set(second: Int) {
var components = self.components
components.second = second
self = calendar.date(from: components)!
}
// MARK: counting
func count(daysToDate date: Date) -> Int {
return calendar.dateComponents([.day], from: self, to: date).day!
}
func count(monthsToDate date: Date) -> Int {
return calendar.dateComponents([.month], from: self, to: date).month!
}
func count(yearsToDate date: Date) -> Int {
return calendar.dateComponents([.year], from: self, to: date).year!
}
var numberOfDaysInMonth: Int {
let range = calendar.range(of: .day, in: .month, for: self)!
return range.upperBound - range.lowerBound
}
func count(componentsToDate date: Date, unit: Calendar.Component) -> Int {
var result = 0
var _startDate = Date()
var _endDate = Date()
var _startInterval: TimeInterval = 0
var _endInterval: TimeInterval = 0
let startSuccess = calendar.dateInterval(of: unit, start: &_startDate, interval: &_startInterval, for: self)
let endSuccess = calendar.dateInterval(of: unit, start: &_endDate, interval: &_endInterval, for: date)
if startSuccess && endSuccess {
let difference = calendar.dateComponents([unit], from: _startDate, to: _endDate)
switch(unit) {
case .year:
result = difference.year!
case .month:
result = difference.month!
case .weekOfMonth:
result = difference.weekday!
case .day:
result = difference.day!
default:
result = 0
}
}
return result
}
//MARK: ranges
var dayRange: DateRange {
return DateRange(start: beginningOfDay, end: endOfDay)
}
var weekRange: DateRange {
return DateRange(start: beginningOfWeek, end: endOfWeek)
}
var monthRange: DateRange {
return DateRange(start: beginningOfMonth, end: endOfMonth)
}
var yearRange: DateRange {
return DateRange(start: beginningOfYear, end: endOfYear)
}
var dayAheadRange: DateRange {
return DateRange(start: self, end: adding(day: 1).adding(second: -1))
}
var monthAheadRange: DateRange {
return DateRange(start: self, end: adding(month: 1).adding(second: -1))
}
var yearAheadRange: DateRange {
return DateRange(start: self, end: adding(year: 1).adding(second: -1))
}
func range(forUnit unit: Calendar.Component) -> DateRange {
var endDate: Date
if unit == .day {
endDate = adding(day: 1)
} else if unit == .month {
endDate = adding(month: 1)
} else if unit == .year {
endDate = adding(year: 1)
} else {
endDate = Date()
}
return DateRange(start: self, end: endDate.adding(second: -1))
}
func contained(in range: DateRange) -> Bool {
return self >= range.start && self <= range.end
}
//MARK: date testing
func contained(within start: Date, to: Date) -> Bool {
let interval = timeIntervalSinceReferenceDate
let startInterval = start.timeIntervalSinceReferenceDate
let endInterval = to.timeIntervalSinceReferenceDate
return interval >= startInterval && interval <= endInterval
}
func hasSameDay(asDate date: Date) -> Bool {
let unitFlags: Set<Calendar.Component> = [.year, .month, .day]
let comp1 = calendar.dateComponents(unitFlags, from: self)
let comp2 = calendar.dateComponents(unitFlags, from: date)
return comp1.year == comp2.year && comp1.month == comp2.month && comp1.day == comp2.day
}
func hasSameMonth(asDate date: Date) -> Bool {
let unitFlags: Set<Calendar.Component> = [.year, .month]
let comp1 = calendar.dateComponents(unitFlags, from: self)
let comp2 = calendar.dateComponents(unitFlags, from: date)
return comp1.month == comp2.month && comp1.year == comp2.year
}
var isInToday: Bool {
return calendar.isDateInToday(self)
}
var isInTomorrow: Bool {
return calendar.isDateInTomorrow(self)
}
func isInSameDay(as date: Date) -> Bool {
return calendar.isDate(self, inSameDayAs: date)
}
//MARK: date creation
init(year: Int, month: Int, day: Int, hour: Int, minute: Int, second: Int) {
self.init()
var components = DateComponents()
components.year = year
components.month = month
components.day = day
components.hour = hour
components.minute = minute
components.second = second
self = calendar.date(from: components)!
}
init(year: Int, month: Int, day: Int) {
self.init(year: year, month: month, day: day, hour: 0, minute: 0, second: 0)
}
//MARK: formatting
var weekdayString: String {
return string(format: "EEEE")
}
var string: String {
let formatter = DateFormatter()
formatter.dateStyle = .long
return formatter.string(from: self)
}
func string(format: String) -> String {
let formatter = DateFormatter()
formatter.dateStyle = .none
formatter.dateFormat = format
return formatter.string(from: self)
}
func string(style: DateFormatter.Style) -> String {
let formatter = DateFormatter()
formatter.dateStyle = style
return formatter.string(from: self)
}
init(string: String, format: String? = nil) {
self.init()
let formatter = DateFormatter()
if let format = format {
formatter.dateStyle = .none
formatter.dateFormat = format
} else {
formatter.dateStyle = .long
}
self = formatter.date(from: string) ?? Date()
}
init?(string: String, formats: [String]) {
self.init()
var potentialMonth = string.trimmingCharacters(in: CharacterSet.decimalDigits).prefix(3)
var potentialYear = string.trimmingCharacters(in: CharacterSet.letters)
if potentialMonth.count != 3 {
potentialMonth = ""
}
if potentialYear.count != 4 {
potentialYear = ""
}
let potentialDate: String = potentialMonth + potentialYear
guard potentialDate.count != 0 else { return nil }
let formatter = DateFormatter()
var temp: Date?
var needsToSetYear = false
var passingFormatIndex: Int?
for (i, format) in formats.enumerated() {
formatter.dateFormat = format
if let date = formatter.date(from: potentialDate) {
needsToSetYear = format.range(of: "y", options: .caseInsensitive) == nil
temp = date
passingFormatIndex = i
break
}
}
var result = temp
if let _ = result {
let today = Date()
let calendar = Calendar.current
if needsToSetYear {
let todayComps = calendar.dateComponents([.year, .month], from: today)
var resultComps = calendar.dateComponents([.month], from: result!)
if resultComps.month! > todayComps.month! {
resultComps.year = todayComps.year! - 1
} else {
resultComps.year = todayComps.year
}
result = calendar.date(from: resultComps)
}
if let pos = passingFormatIndex {
result = calendar.date(bySetting: .second, value: pos, of: result!)
}
}
if let result = result {
self = result
} else {
return nil
}
}
}
<file_sep># Use Cases #
## Add Tasks ##
1. User opens the application
2. User clicks + at the bottom of the tasklist
3. A new view opens up
4. User adds title to task
5. User selects due date option
6. Calendar drop down menu appears
7. User selects date from dynamic calendar picker
8. User selects priority option
9. Drop down menu appears
10. User selects priority
11. User enters description
12. User clicks done
13. Task appears in the task list
14. User can check the checkbox besides the task
## Delete tasks ##
1. User swipes right on a task and the task is deleted
## Navigate to other views ##
1. Open application
2. Select unhighlighted task item at the bottom of the screen
3. View changes to 1 of 3 other views
<file_sep>//
// ViewControllerCalender.swift
// Prime Planner
//
// Created by <NAME> on 10/4/18.
// Copyright © 2018 Poods. All rights reserved.
//
import UIKit
import Foundation
class ViewControllerCalender: UIViewController, JCalendarDelegate, JCalendarDataSource {
var calendar: JCalendar!
let tasksView = CalendarTaskListView()
var taskListTopConstraint: NSLayoutConstraint!
var date = Date()
var shouldFocusToday = true
var statusBarHidden = false {
didSet {
guard oldValue != statusBarHidden else { return }
UIView.animate(withDuration: 0.3) {
self.setNeedsStatusBarAppearanceUpdate()
}
}
}
override var prefersStatusBarHidden: Bool {
return statusBarHidden
}
override var preferredStatusBarUpdateAnimation: UIStatusBarAnimation {
return .slide
}
override func viewDidLoad() {
super.viewDidLoad()
navigationController?.setNavigationBarHidden(true, animated: false)
automaticallyAdjustsScrollViewInsets = false
view.backgroundColor = UIColor.white
layoutCalendar()
layoutTasksView()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
calendar.collectionView.reloadData()
tasksView.finishFilterTaskOperation()
}
func layoutCalendar() {
calendar = JCalendar(date: Date())
calendar.delegate = self
calendar.dataSource = self
calendar.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(calendar)
NSLayoutConstraint.activate([
calendar.widthAnchor.constraint(equalTo: view.widthAnchor),
calendar.topAnchor.constraint(equalTo: view.topAnchor, constant: UIApplication.shared.statusBarFrame.height),
calendar.bottomAnchor.constraint(equalTo: view.bottomAnchor),
calendar.centerXAnchor.constraint(equalTo: view.centerXAnchor)
])
}
func layoutTasksView() {
tasksView.translatesAutoresizingMaskIntoConstraints = false
tasksView.parent = self
view.addSubview(tasksView)
taskListTopConstraint = tasksView.topAnchor.constraint(equalTo: view.topAnchor)
NSLayoutConstraint.activate([
tasksView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
taskListTopConstraint,
tasksView.widthAnchor.constraint(equalTo: view.widthAnchor),
tasksView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
])
}
override var canBecomeFirstResponder: Bool {
return true
}
// Calendar Delegate
func calendar(_ calendar: JCalendar, didSelectDate date: Date, selectedAutomatically: Bool, isReselecting: Bool) {
guard !isReselecting else { return }
tasksView.filterTasksInDateRange(
date.dayRange
)
}
func calendar(_ calendar: JCalendar, markerColorForDate date: Date) -> UIColor? {
var color: UIColor?
// if a task exists in this date, mark the cell
if jcore.tasks.match(range: date.dayRange).fetchOrNil() != nil {
color = AppTheme.color()
}
return color
}
func calendar(_ calendar: JCalendar, willUpdateHeight height: CGFloat) {
let constant = UIApplication.shared.statusBarFrame.height + calendar.headerHeight + height
guard taskListTopConstraint.constant != constant else { return }
let animate = taskListTopConstraint.constant != 0
self.view.layoutIfNeeded()
taskListTopConstraint.constant = constant
if animate {
let anim = UIViewPropertyAnimator(duration: 0.3, curve: .linear) {
self.view.layoutIfNeeded()
}
anim.startAnimation()
} else {
self.view.layoutIfNeeded()
}
}
}
<file_sep>//
// KeyboardService.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
// simple utility class that will help return the height of the system keyboard
// simple utility class that will help return the height of the system keyboard
class KeyboardService {
	static var shared = KeyboardService()
	/// Cached system keyboard height (0 until captured).
	static var height: CGFloat {
		return shared.size.height
	}
	/// Cached system keyboard width (0 until captured).
	static var width: CGFloat {
		return shared.size.width
	}
	// keyboard frame captured from the first will-show notification
	private var size = CGRect.zero
	private var observerRemoved = false
	init() {
		// observe the system keyboard show notification
		NotificationCenter.default.addObserver(self, selector: #selector(keyboardWillShow(_:)), name: NSNotification.Name.UIKeyboardWillShow, object: nil)
	}
	deinit {
		// remove the observer, if not already removed
		if !observerRemoved {
			NotificationCenter.default.removeObserver(self)
		}
	}
	/// Forces the keyboard to briefly appear so its frame can be captured.
	func setup(_ view: UIView) {
		// fixed: the guard was inverted (`size != .zero`), which skipped the
		// capture exactly when the size was still unknown and ran the
		// textfield trick only after the size had already been captured
		guard size == .zero else { return }
		// creates an empty textfield, sets it as a responder and then resigns it
		// this forces a call to the observer and retrieves the keyboard size
		let tf = UITextField()
		view.addSubview(tf)
		tf.becomeFirstResponder()
		tf.resignFirstResponder()
		tf.removeFromSuperview()
	}
	@objc func keyboardWillShow(_ note: Notification) {
		// gets the end frame of the keyboard from the notification info
		// only runs if size is .zero, meaning it hasn't already been set
		guard size == .zero, let info = note.userInfo, let value = info[UIKeyboardFrameEndUserInfoKey] as? NSValue else { return }
		size = value.cgRectValue
		// one-shot: stop observing once the size is known
		NotificationCenter.default.removeObserver(self)
		observerRemoved = true
	}
}
<file_sep>//
// JCoreArray.swift
// JCore
//
// Created by <NAME> on 9/7/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import CoreData
/// Iterator over the dictionary rows backing a JCoreArray.
struct JCoreArrayIterator: IteratorProtocol {
	let array: [[String: Any]]
	var index = 0
	init(_ array: [[String: Any]]) {
		self.array = array
	}
	/// Returns the next row, or nil once the backing array is exhausted.
	mutating func next() -> [String: Any]? {
		guard index < array.count else { return nil }
		// advance the cursor after handing back the current row
		defer { index += 1 }
		return array[index]
	}
}
/// Lightweight wrapper around fetched Core Data rows, where each row is a
/// dictionary of property values (plus an "object" NSManagedObjectID — see
/// the object helpers in the extension).
class JCoreArray<T: NSManagedObject>: Sequence {
	private var data: [[String: Any]]
	private var context: NSManagedObjectContext {
		return JCore.shared.context
	}
	/// Number of rows in the core array.
	var count: Int {
		return data.count
	}
	subscript(index: Int) -> [String: Any] {
		return data[index]
	}
	init(data: [[String: Any]]) {
		self.data = data
	}
	/// Creates an empty core array.
	init() {
		self.data = [[String: Any]]()
	}
	/// Returns the value of the given key and index of the core array.
	/// Returns nil for out-of-range indices instead of trapping.
	func value(forKey key: String, atIndex index: Int) -> Any? {
		guard index < data.count else { return nil }
		return data[index][key]
	}
	/// Sorts the core array using the given block.
	func sort(by areInIncreasingOrder: @escaping ([String: Any], [String: Any]) -> (Bool)) {
		data.sort(by: areInIncreasingOrder)
	}
	/// Returns the index of the first dictionary where the key matches the given property.
	func index<U: Equatable>(of object: Any, withKey key: String, type: U.Type) -> Int? {
		guard let object = object as? U else { return nil }
		// firstIndex(where:) replaces the deprecated index(where:) API,
		// matching the firstIndex(of:) usage elsewhere in the project
		return data.firstIndex { $0[key] as? U == object }
	}
	func makeIterator() -> JCoreArrayIterator {
		return JCoreArrayIterator(data)
	}
}
// object functions — resolve the "object" NSManagedObjectID stored in each
// row back into a live managed object of type T.
extension JCoreArray {
    /// Returns the object in the core array at the given index, if found.
    func object(atIndex index: Int) -> T? {
        guard index < data.count, let id = data[index]["object"] as? NSManagedObjectID else { return nil }
        return context.object(with: id) as? T
    }
    /// Searches the dictionary for a JCoreArray object and returns it if found.
    func object(fromDictionary dict: [String: Any]) -> T? {
        guard let id = dict["object"] as? NSManagedObjectID else { return nil }
        return object(with: id) as? T
    }
    /// Returns the object in the context with the given id
    func object(with id: NSManagedObjectID) -> NSManagedObject {
        return context.object(with: id)
    }
    /// Returns an array of every object value for the given keyPath.
    /// NOTE(review): force-unwraps both the row's object and the keyPath
    /// value — traps if a row lacks an "object" id or the keyPath is nil.
    func objectValues(forKeyPath keyPath: String) -> [Any] {
        return data.map { dict -> Any in
            let obj = self.object(fromDictionary: dict)!
            return obj.value(forKeyPath: keyPath)!
        }
    }
    /// Iterates through each object in the core array
    /// (rows without a resolvable object are skipped silently).
    func forEachObject(_ body: (T) -> Void) {
        data.forEach { dict in
            guard let obj = self.object(fromDictionary: dict) else { return }
            body(obj)
        }
    }
    /// Returns the index of the first object where the key matches the given property.
    /// NOTE(review): force-unwraps the resolved object for every row.
    func indexOfObject<U: Equatable>(where key: String, equals property: Any, type: U.Type) -> Int? {
        guard let property = property as? U else { return nil }
        return data.index { dict -> Bool in
            let obj = self.object(fromDictionary: dict)!
            return obj.value(forKey: key) as? U == property
        }
    }
    /// Sorts the objects in the core array using the given block.
    /// Both rows are force-resolved to T before comparison.
    func sortObjects(by areInIncreasingOrder: @escaping (T, T) -> (Bool)) {
        data.sort { (d1, d2) -> Bool in
            let o1 = context.object(with: d1["object"] as! NSManagedObjectID) as! T
            let o2 = context.object(with: d2["object"] as! NSManagedObjectID) as! T
            return areInIncreasingOrder(o1, o2)
        }
    }
}
<file_sep>//
// DashboardDataTests.swift
// Prime PlannerTests
//
// Created by <NAME> on 11/9/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import XCTest
@testable import Prime_Planner
/// Verifies that the dashboard partitions tasks into its four sections:
/// today, this week, upcoming, and no-due-date.
class DashboardDataTests: XCTestCase {
    func testTaskSectionRetrieval() {
        // create dates for tasks
        let today = Date()
        let tomorrow = today.adding(day: 1)
        let nextMonth = today.adding(month: 1)
        // create the task given the due date
        let todayTask = insertTask(dueDate: today)
        let tomorrowTask = insertTask(dueDate: tomorrow)
        let nextMonthTask = insertTask(dueDate: nextMonth)
        // create ranges for day and week
        let dayRange = Date().dayRange
        let weekRange = DateRange(start: Date().adding(day: 1), end: Date().adding(day: 1).endOfWeek)
        // fetch our tasks based on the created date ranges
        let todayTasks = jcore.tasks.match(range: dayRange).sort("dueDate", ascending: true).fetch()
        let weekTasks = jcore.tasks.match(range: weekRange).sort("dueDate", ascending: true).fetch()
        let noDueDateTasks = jcore.tasks.filter("dueDate == nil").fetch()
        // "upcoming" is defined by exclusion: everything not in the other three
        let upcomingTasks = jcore.tasks.fetch().filter({ !todayTasks.contains($0) && !weekTasks.contains($0) && !noDueDateTasks.contains($0) })
        // if all sectioning passes, this will remain true
        var isTasksSectioned = true
        // check each section, if any fails, 'isTasksSectioned' will return false
        // 1) every "today" task must have a due date that falls today
        for task in todayTasks {
            guard let due = task.dueDate else {
                isTasksSectioned = false
                break
            }
            if !due.isInToday {
                isTasksSectioned = false
                break
            }
        }
        // 2) every "week" task must fall inside the current week
        for task in weekTasks {
            guard let due = task.dueDate else {
                isTasksSectioned = false
                break
            }
            if !due.contained(in: today.weekRange) {
                isTasksSectioned = false
                break
            }
        }
        // 3) "upcoming" tasks must be dated, not today, outside this week, and not past
        for task in upcomingTasks {
            guard let due = task.dueDate else {
                isTasksSectioned = false
                break
            }
            if due.isInToday || due.contained(in: today.weekRange) || due < today {
                isTasksSectioned = false
                break
            }
        }
        // 4) "no due date" tasks must have no due date at all
        for task in noDueDateTasks {
            if let _ = task.dueDate {
                isTasksSectioned = false
                break
            }
        }
        // test if database returns the sectioned data as true
        XCTAssert(isTasksSectioned)
        // remove the task
        removeTask(todayTask)
        removeTask(tomorrowTask)
        removeTask(nextMonthTask)
    }
    /// Inserts and persists a test task with the given due date, then
    /// re-fetches it by id to confirm it was stored. Returns the fetched task.
    func insertTask(dueDate: Date) -> Task {
        // create a test task
        let task = Task(name: "<NAME>!")
        task.creationDate = Date()
        task.dueDate = dueDate
        task.note = "This is a note."
        task.priority = .low
        // insert the task into the database
        jcore.save()
        // save the id
        let id = task.id
        // get task from database
        let getTask = jcore.tasks.match(id: id).fetchFirst()
        // check if the task exists with the specified name
        XCTAssertNotNil(getTask)
        return getTask!
    }
    /// Deletes the given task and asserts it can no longer be fetched.
    func removeTask(_ task: Task) {
        // get task id
        let id = task.id
        // get task from database
        let task = jcore.tasks.match(id: id).fetchFirst()
        // check if the task exists with the specified name
        XCTAssertNotNil(task)
        // remove the task from the database
        jcore.remove(task)
        // check if the task is nil (was removed properly)
        XCTAssertNil(jcore.tasks.match(id: id).fetchFirst())
    }
}
<file_sep>//
// JCore+Fetch.swift
// JCore
//
// Created by <NAME> on 9/6/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import CoreData
/// Describes one property to fetch in a keyed CoreData request: either a
/// plain group-by key, or a named expression evaluated from a key path.
struct JCoreRequestKey {
    fileprivate var key: String
    fileprivate let groupBy: Bool
    fileprivate var keyPath: String?
    fileprivate var type: NSAttributeType?

    /// A simple property key; the request will also group results by it.
    init(key: String) {
        groupBy = true
        self.key = key
    }

    /// An expression key: `keyPath` and `type` describe the evaluated
    /// expression, while `key` names the result. Not used for grouping.
    init(key: String, keyPath: String, type: NSAttributeType) {
        groupBy = false
        self.key = key
        self.keyPath = keyPath
        self.type = type
    }
}
/// Fluent builder around an `NSFetchRequest<T>`: chain `filter`/`sort`/
/// `limit` calls (each returns `self`), then terminate with one of the
/// `fetch` variants.
class JCoreRequest<T: NSManagedObject> {
    private var request: NSFetchRequest<T>
    // Optional in-memory sort/filter applied after the store-level fetch.
    private var sortHandler: ((T, T) -> (Bool))?
    private var filterHandler: ((T) -> (Bool))?
    /// Returns the count of objects represented by the current request.
    var count: Int {
        return fetch(keyPath: "@count") as! Int
    }
    init(request: NSFetchRequest<T>) {
        self.request = request
    }
    /// Filters the data in the request with a handler that traverses each object.
    /// (Applied in memory after the fetch; replaces any previous handler.)
    func filter(_ filterHandler: @escaping ((T) -> (Bool))) -> JCoreRequest {
        self.filterHandler = filterHandler
        return self
    }
    /// Filters the data in the request using a predicate.
    func filter(_ predicate: NSPredicate) -> JCoreRequest {
        let newPredicate = predicate
        // if we already have a predicate in this request, compound them (AND), otherwise set the predicate.
        if let predicate = request.predicate {
            request.predicate = NSCompoundPredicate(andPredicateWithSubpredicates: [predicate, newPredicate])
        } else {
            request.predicate = newPredicate
        }
        return self
    }
    /// Filter the data in the request using a predicate format (uses SQL format).
    func filter(_ format: String, _ arguments: Any...) -> JCoreRequest {
        return filter(NSPredicate(format: format, argumentArray: arguments))
    }
    /// Sort the data in the request using a handler that traverses through the objects.
    /// (Applied in memory after the fetch.)
    func sort(by handler: @escaping (T, T) -> (Bool)) -> JCoreRequest {
        sortHandler = handler
        return self
    }
    /// Sort the data using a descriptor. Descriptors accumulate in call order.
    func sort(_ descriptor: NSSortDescriptor) -> JCoreRequest {
        // if we have no current descriptors, create an array to hold any new descriptors.
        if request.sortDescriptors == nil {
            request.sortDescriptors = [NSSortDescriptor]()
        }
        // append the passed descriptor to the array
        request.sortDescriptors?.append(descriptor)
        return self
    }
    /// Sort the data using a descriptor string.
    func sort(_ descriptor: String, ascending: Bool, caseInsensitive: Bool = false) -> JCoreRequest {
        let desc: NSSortDescriptor
        if caseInsensitive {
            desc = NSSortDescriptor(key: descriptor, ascending: ascending, selector: #selector(NSString.caseInsensitiveCompare(_:)))
        } else {
            desc = NSSortDescriptor(key: descriptor, ascending: ascending)
        }
        return sort(desc)
    }
    /// Limit the number of objects that will be returned.
    func limit(_ limit: Int) -> JCoreRequest {
        request.fetchLimit = limit
        return self
    }
    /// Fetch any objects that match the given keyPath.
    /// Supports collection operators such as "@count"; returns nil on fetch failure.
    func fetch(keyPath: String, fromContext context: NSManagedObjectContext = JCore.shared.context) -> Any? {
        var result: Any?
        if let results = try? context.fetch(request) as NSArray {
            result = results.value(forKeyPath: keyPath)
        }
        return result
    }
    /// Fetch any objects using request keys for expressions.
    /// Runs a dictionary-result fetch; when `includeObject` is true each row
    /// also carries the managed object's id under the "object" key.
    func fetch(keys: [JCoreRequestKey], distinct: Bool, includeObject: Bool = true, fromContext context: NSManagedObjectContext = JCore.shared.context) -> JCoreArray<T> {
        // loop through keys and create expressions for each
        var propertiesToGroupBy = [Any]()
        var propertiesToFetch = [Any]()
        for key in keys {
            if key.groupBy {
                propertiesToGroupBy.append(key.key)
            }
            if let keyPath = key.keyPath, let type = key.type {
                let description = NSExpressionDescription()
                description.expression = NSExpression(forKeyPath: keyPath)
                description.name = key.key
                description.expressionResultType = type
                propertiesToFetch.append(description)
            } else {
                propertiesToFetch.append(key.key)
            }
        }
        // create expression and description for the main object
        if includeObject {
            let expression = NSExpression.expressionForEvaluatedObject()
            let description = NSExpressionDescription()
            description.name = "object"
            description.expression = expression
            description.expressionResultType = .objectIDAttributeType
            propertiesToFetch.append(description)
        }
        // setup the request (entity name derived from T's class name)
        let className = NSStringFromClass(T.self).components(separatedBy: ".").last!
        let request = NSFetchRequest<NSDictionary>(entityName: className)
        request.resultType = .dictionaryResultType
        request.propertiesToGroupBy = propertiesToGroupBy
        request.propertiesToFetch = propertiesToFetch
        request.returnsDistinctResults = distinct
        request.predicate = self.request.predicate
        request.sortDescriptors = self.request.sortDescriptors
        request.fetchLimit = self.request.fetchLimit
        // fetch objects based on the requested properties
        var objects = [[String: Any]]()
        do {
            objects = try context.fetch(request) as! [[String : Any]]
        } catch let e as NSError {
            print("Core.loadAll()\nERROR: \(e)")
        }
        // create a core array wherein our data can be nested
        let fetchedArray = JCoreArray<T>(data: objects)
        // sort our data, if a handler was specified
        if let sort = sortHandler {
            fetchedArray.sortObjects(by: sort)
        }
        // return our array
        return fetchedArray
    }
    /// Fetch all objects in the request as an array.
    /// In-memory sort/filter handlers run inside performAndWait.
    func fetch(fromContext context: NSManagedObjectContext = JCore.shared.context) -> [T] {
        var objects = [T]()
        context.performAndWait {
            do {
                objects = try context.fetch(request)
            } catch let e as NSError {
                print("Core.loadAll()\nERROR: \(e)")
            }
            if let sort = sortHandler {
                objects.sort(by: sort)
            }
            if let filter = filterHandler {
                objects = objects.filter(filter)
            }
        }
        return objects
    }
    /// Fetch key objects in the request as an array or return nil if no objects were found.
    func fetchOrNil(keys: [JCoreRequestKey], distinct: Bool, fromContext context: NSManagedObjectContext = JCore.shared.context) -> JCoreArray<T>? {
        let results = fetch(keys: keys, distinct: distinct, fromContext: context)
        return results.count != 0 ? results : nil
    }
    /// Fetch all objects in the request as an array or return nil if no objects were found.
    func fetchOrNil(fromContext context: NSManagedObjectContext = JCore.shared.context) -> [T]? {
        let results = fetch(fromContext: context)
        return results.count != 0 ? results : nil
    }
    /// Fetch first object in the request.
    /// Note: mutates the request's fetchLimit to 1 as a side effect.
    func fetchFirst(fromContext context: NSManagedObjectContext = JCore.shared.context) -> T? {
        return limit(1).fetch(fromContext: context).first
    }
}
<file_sep>//
// Checkbox.swift
// Prime Planner
//
// Created by <NAME> on 9/16/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
import UIKit
/// A square UIButton styled as a two-state checkbox.
class Checkbox: UIButton {
    /// Mirrors `isSelected` under a checkbox-flavored name.
    var isChecked: Bool {
        get {
            return isSelected
        }
        set {
            isSelected = newValue
        }
    }
    // Checkmark image shown while selected; the normal state shows no image.
    private let imageOn = UIImage(named: "checked-1")
    convenience init() {
        self.init(frame: .zero)
        // set our images for normal and selected states
        setImage(nil, for: .normal)
        setImage(imageOn, for: .selected)
        // style the checkbox with a radius and border
        setRadius(3)
        setBorder(1, color: AppTheme.color())
    }
    /// Toggles the checked state.
    /// NOTE(review): no target/action is registered here — confirm the
    /// owning view wires this up to the button's tap event.
    func checkBoxTapped() {
        // toggle between selected states
        isSelected = !isSelected
    }
}
<file_sep>//
// JCoreRequest-Task.swift
// Prime Planner
//
// Created by <NAME> on 9/12/18.
// Copyright © 2018 Poods. All rights reserved.
//
import Foundation
extension JCore {
    /// Entry point for building task queries: `jcore.tasks.filter(...)...`.
    var tasks: JCoreRequest<Task> {
        return jcore.data(Task.self)
    }
}
/// Task-specific query helpers.
extension JCoreRequest where T: Task {
    /// Narrows the request to the task with the given unique id.
    func match(id: UUID) -> JCoreRequest {
        return filter("id == %@", id)
    }
    /// Narrows the request to tasks whose dueDate lies inside the range (inclusive).
    func match(range: DateRange) -> JCoreRequest {
        return filter("dueDate >= %@ AND dueDate <= %@", range.start, range.end)
    }
}
<file_sep># prime-planner
## Mission Statement ##
* For students currently enrolled in High school and College courses
* Our product assists students in keeping track of daily, weekly, and monthly tasks and preventing them from missing deadlines.
* Product Planner, an organizational app for students
* That leads to better grades, better jobs, better life, better future.
* Unlike other applications, our app incentivizes students to study by adding leaderboards so that they can see how they measure up against other people. This incites a feeling of friendly competition among students.
* Our product offers daily incentives to maximize our user retention and increase the utility of the app.
<br><br> [**Product Backlog**](https://docs.google.com/spreadsheets/d/1HSBoC19WC9w0YWZ5xD9QxapS4QA8zcKgTGT2fqvcHXQ/edit#gid=0)
## Sprint 1 ##
* [Backlog](https://docs.google.com/spreadsheets/d/1EmF-StsJCDXHWK51zc_gVkav4ivncqkfktADfB3wTbw/edit#gid=0)
* [Burndown Chart](https://docs.google.com/spreadsheets/d/1EmF-StsJCDXHWK51zc_gVkav4ivncqkfktADfB3wTbw/edit#gid=0)
* [UML Diagram](https://github.com/PoodsProject/prime-planner/blob/master/PrimePlannerUMLSprint1.xml)
## Sprint 2 ##
* [Backlog](https://docs.google.com/spreadsheets/d/1Nysj_K4-QUsn_6kKBrEUE_sAhqnjedjYf9DnCkg8r1U/edit#gid=0)
* [Burndown Chart](https://docs.google.com/spreadsheets/d/1Nysj_K4-QUsn_6kKBrEUE_sAhqnjedjYf9DnCkg8r1U/edit#gid=0)
* UML Diagrams
* [Architecture](https://github.com/PoodsProject/prime-planner/blob/master/Sprint%202%20Architecture%20Design.xml)
* [Class](https://github.com/PoodsProject/prime-planner/blob/master/Sprint%202%20UML.xml)
* [Requirements](https://docs.google.com/document/d/1jTOyK_L0oS5ParN9OusFVdniWb1c-MgZWZLNv_J-OVw/edit?usp=sharing)
* Tests
* [Use Cases](https://github.com/PoodsProject/prime-planner/blob/master/Use_cases.md)
* [Test cases](https://github.com/PoodsProject/prime-planner/tree/master/Prime%20PlannerTests)
* [Demo](https://www.youtube.com/watch?v=peAU06BIvtg&feature=youtu.be)
## Sprint 3 ##
* [Backlog](https://docs.google.com/spreadsheets/d/1VA_NIfjlXeLH4OkxnsuecHzvKW_QF0dxLrnsrLLLM74/edit#gid=0)
* [Burndown Chart](https://docs.google.com/spreadsheets/d/1VA_NIfjlXeLH4OkxnsuecHzvKW_QF0dxLrnsrLLLM74/edit#gid=0)
* UML Diagrams
* [Architecture](https://github.com/PoodsProject/prime-planner/blob/master/Sprint%202%20Architecture%20Design.xml)
* [Class](https://github.com/PoodsProject/prime-planner/blob/master/Sprint%203%20UML.xml)
* [Requirements](https://docs.google.com/document/d/1c2AgvjSABRA5wteklnDUscMIhobxsfwpT0a17MMBafU/edit)
* Tests
* [Use Cases](https://docs.google.com/document/d/1TL1bNHVo5r9oRVgPzse9qoLFBiIqeA2EA4f0-ArcGvY/edit)
* [Test cases](https://github.com/PoodsProject/prime-planner/tree/master/Prime%20PlannerTests)
* [Demo](https://github.com/PoodsProject/prime-planner/blob/master/Demonstration_2.mp4)
* [Demo YouTube link](https://www.youtube.com/watch?v=N3FuuWRWOPE&feature=youtu.be)
<file_sep>//
// ButtonView.swift
//
// Created by <NAME> on 10/1/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
import UIKit
/// Model for a single button inside a `ButtonView`: holds either a title
/// or an image name, an optional fixed width, and the realized UIButton.
class ButtonViewButton {
    var title: String?
    var image: String?
    var width: CGFloat?
    var button: UIButton?
    /// Text/tint color; assigning a new value restyles the realized button.
    var textColor: UIColor = .black {
        didSet {
            button?.setTitleColor(textColor, for: .normal)
            guard let realized = button, let icon = realized.currentImage else { return }
            realized.setImage(icon.image(withColor: textColor), for: .normal)
        }
    }

    /// A text button, optionally with a fixed width.
    init(title: String, width: CGFloat? = nil) {
        self.title = title
        self.width = width
    }

    /// An image button, optionally with a fixed width.
    init(image: String, width: CGFloat? = nil) {
        self.image = image
        self.width = width
    }

    /// An empty placeholder to be configured later.
    init() {}
}
/// A horizontal bar of buttons pinned to the bottom of its superview.
/// Buttons may declare fixed widths; the remaining buttons share the
/// leftover width equally. A single `action` closure receives every tap.
class ButtonView: UIView {
    // How the view was initialized: from titles, image names, or
    // preconfigured ButtonViewButton models.
    enum ButtonTitleType {
        case `default`, image, button
    }
    fileprivate var heightConstraint: NSLayoutConstraint!
    /// Bar height; updating it resizes the view and rescales button fonts.
    var height: CGFloat = 50 {
        didSet {
            if height != oldValue {
                layoutIfNeeded()
                heightConstraint?.constant = height + bottomPadding
                layoutIfNeeded()
                for button in buttons {
                    if let button = button.button {
                        // font scales with the bar, but never below 20pt
                        var fontSize = height * (2/5)
                        if fontSize < 20 {
                            fontSize = 20
                        }
                        button.titleLabel?.font = UIFont.systemFont(ofSize: fontSize)
                    }
                }
            }
        }
    }
    // Extra space reserved below the buttons (e.g. for the home indicator).
    var bottomPadding: CGFloat = 0
    /// Invoked with the tapped UIButton on touch-up-inside.
    var action: ((_ button: UIButton) -> ())?
    var buttons: [ButtonViewButton]!
    // When true, didMoveToSuperview pins this view to the superview's bottom.
    var autoFixesToView = true
    // Container holding the realized UIButtons.
    var buttonView = UIView()
    var buttonColor = UIColor(white: 0.96, alpha: 1) {
        didSet {
            backgroundColor = buttonColor
            for button in buttons {
                button.button?.backgroundColor = buttonColor
            }
        }
    }
    var textColor = UIColor.black {
        didSet {
            for button in buttons {
                button.textColor = textColor
            }
        }
    }
    // Designated initializer backing the convenience inits (see extension).
    fileprivate init(titles: [String]?, buttons: [ButtonViewButton]?, type: ButtonTitleType, action: ((_ button: UIButton) -> ())? = nil) {
        self.action = action
        if type == .button {
            self.buttons = buttons
        }
        super.init(frame: CGRect.zero)
        translatesAutoresizingMaskIntoConstraints = false
        if let titles = titles , type != .button {
            setupButtonsForTitles(titles, titlesAreImages: type == .image)
        }
        setup()
    }
    override func didMoveToSuperview() {
        super.didMoveToSuperview()
        // pin to the new superview's bottom edge at full width
        if let view = superview, autoFixesToView {
            heightConstraint = heightAnchor.constraint(equalToConstant: height + bottomPadding)
            NSLayoutConstraint.activate([
                centerXAnchor.constraint(equalTo: view.centerXAnchor),
                bottomAnchor.constraint(equalTo: view.bottomAnchor),
                widthAnchor.constraint(equalTo: view.widthAnchor),
                heightConstraint
            ])
        }
    }
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    /// Wraps raw titles (or image names) into ButtonViewButton models.
    func setupButtonsForTitles(_ titles: [String], titlesAreImages: Bool = false) {
        buttons = [ButtonViewButton]()
        for title in titles {
            let button = ButtonViewButton()
            if titlesAreImages {
                button.image = title
            } else {
                button.title = title
            }
            buttons.append(button)
        }
    }
    /// Builds the container view and realizes each button with constraints.
    func setup() {
        backgroundColor = buttonColor
        buttonView.backgroundColor = .black
        buttonView.translatesAutoresizingMaskIntoConstraints = false
        addSubview(buttonView)
        NSLayoutConstraint.activate([
            buttonView.heightAnchor.constraint(equalToConstant: height),
            buttonView.widthAnchor.constraint(equalTo: widthAnchor),
            buttonView.topAnchor.constraint(equalTo: topAnchor),
            buttonView.centerXAnchor.constraint(equalTo: centerXAnchor)
        ])
        // gap between adjacent buttons (none when there is a single button)
        let distance: CGFloat = buttons.count > 1 ? 2 : 0
        var fixedButtons = [ButtonViewButton]()
        var fixedButtonsWidth: CGFloat = 0
        for button in buttons! {
            if let width = button.width {
                fixedButtons.append(button)
                fixedButtonsWidth += width
            }
        }
        //calculate fixed buttons width
        var fixedWidth: CGFloat = 0
        if fixedButtons.count != 0 {
            fixedWidth = (fixedButtonsWidth / CGFloat(fixedButtons.count)) - (distance / CGFloat(fixedButtons.count))
        }
        // realize each button, chaining constraints to its predecessor
        var previousVariableButton: UIButton?
        var previousConstantButton: UIButton?
        for (i, button) in buttons.enumerated() {
            var newButtonWidth: CGFloat?
            if let _ = button.width { //fixed button
                newButtonWidth = fixedWidth
            }
            button.button = addButton(button.title ?? button.image ?? "", textColor: button.textColor, isImage: button.image != nil, previousVariableButton: previousVariableButton, previousConstantButton: previousConstantButton, constantWidth: newButtonWidth, isLast: i == buttons.count - 1)
            if newButtonWidth == nil {
                previousConstantButton = nil
                previousVariableButton = button.button
            } else {
                previousConstantButton = button.button
            }
        }
    }
    // adds a new button to the button view, chaining its horizontal
    // constraints to whichever previous button kind was last realized
    private func addButton(
        _ name: String,
        textColor: UIColor,
        isImage: Bool,
        previousVariableButton: UIButton?,
        previousConstantButton: UIButton?,
        constantWidth: CGFloat?,
        isLast: Bool
    ) -> UIButton {
        let distance: CGFloat = buttons.count > 1 ? 2 : 0
        let button = UIButton()
        button.translatesAutoresizingMaskIntoConstraints = false
        button.backgroundColor = buttonColor
        button.setTitleColor(textColor, for: .normal)
        // press feedback + tap handling (see the "Button Selectors" extension)
        button.addTarget(self, action: #selector(ButtonView.down(_:)), for: [.touchDown, .touchDragEnter])
        button.addTarget(self, action: #selector(ButtonView.exit(_:)), for: [.touchDragExit, .touchCancel])
        button.addTarget(self, action: #selector(ButtonView.up(_:)), for: [.touchUpInside])
        button.adjustsImageWhenHighlighted = false
        if isImage {
            button.setImage(UIImage(named: name)?.image(withColor: textColor), for: .normal)
        } else {
            button.setTitle(name, for: UIControlState())
            // font scales with the bar, but never below 20pt
            var fontSize = height * (2/5)
            if fontSize < 20 {
                fontSize = 20
            }
            button.titleLabel?.font = UIFont.systemFont(ofSize: fontSize)
        }
        buttonView.addSubview(button)
        var constraints = [NSLayoutConstraint]()
        if let previousVariableButton = previousVariableButton {
            if let previousConstantButton = previousConstantButton {
                constraints.append(contentsOf: [
                    button.leadingAnchor.constraint(equalTo: previousConstantButton.trailingAnchor, constant: distance / 2),
                ])
                if isLast && constantWidth != nil {
                    constraints.append(contentsOf: [
                        button.trailingAnchor.constraint(equalTo: trailingAnchor)
                    ])
                }
            } else {
                if isLast && constantWidth != nil {
                    constraints.append(contentsOf: [
                        button.trailingAnchor.constraint(equalTo: trailingAnchor),
                        previousVariableButton.trailingAnchor.constraint(equalTo: button.leadingAnchor, constant: -(distance / 2))
                    ])
                } else {
                    constraints.append(contentsOf: [
                        button.leadingAnchor.constraint(equalTo: previousVariableButton.trailingAnchor, constant: distance / 2),
                    ])
                }
            }
            // variable-width buttons are constrained to share equal widths
            if constantWidth == nil {
                constraints.append(contentsOf: [
                    button.widthAnchor.constraint(equalTo: previousVariableButton.widthAnchor),
                    previousVariableButton.widthAnchor.constraint(equalTo: button.widthAnchor)
                ])
            }
        } else if let previousConstantButton = previousConstantButton {
            constraints.append(contentsOf: [
                button.leadingAnchor.constraint(equalTo: previousConstantButton.trailingAnchor, constant: distance / 2),
            ])
        } else {
            // first button anchors to the bar's leading edge
            constraints.append(
                button.leadingAnchor.constraint(equalTo: leadingAnchor)
            )
        }
        if let constantWidth = constantWidth {
            constraints.append(contentsOf: [
                button.widthAnchor.constraint(equalToConstant: constantWidth)
            ])
        } else if isLast {
            constraints.append(
                button.trailingAnchor.constraint(equalTo: trailingAnchor)
            )
        }
        constraints.append(contentsOf: [
            button.heightAnchor.constraint(equalToConstant: height),
            button.topAnchor.constraint(equalTo: topAnchor)
        ])
        NSLayoutConstraint.activate(constraints)
        return button
    }
}
// Button Selectors — pressed-state feedback and tap dispatch.
extension ButtonView {
    /// Touch-down / drag-enter: dim the button for pressed feedback.
    @objc func down(_ sender: UIButton) {
        sender.backgroundColor = sender.backgroundColor?.withAlphaComponent(0.8)
    }
    /// Drag-exit / cancel: restore full opacity.
    @objc func exit(_ sender: UIButton) {
        sender.backgroundColor = sender.backgroundColor?.withAlphaComponent(1.0)
    }
    /// Touch-up-inside: restore opacity, then forward to the action handler.
    @objc func up(_ sender: UIButton) {
        self.exit(sender)
        action?(sender)
    }
}
// Initializers — public convenience entry points over the private
// designated initializer.
extension ButtonView {
    /// A bar with a single text button.
    convenience init(title: String, action: ((_ button: UIButton) -> ())? = nil) {
        self.init(titles: [title], action: action)
    }
    /// A bar with a single image button (named asset).
    convenience init(image: String, action: ((_ button: UIButton) -> ())? = nil) {
        self.init(images: [image], action: action)
    }
    /// A bar with a single preconfigured button model.
    convenience init(button: ButtonViewButton, action: ((_ button: UIButton) -> ())? = nil) {
        self.init(buttons: [button], action: action)
    }
    /// A bar of text buttons, one per title.
    convenience init(titles: [String], action: ((_ button: UIButton) -> ())? = nil) {
        self.init(titles: titles, buttons: nil, type: .default, action: action)
    }
    /// A bar of image buttons, one per asset name.
    convenience init(images: [String], action: ((_ button: UIButton) -> ())? = nil) {
        self.init(titles: images, buttons: nil, type: .image, action: action)
    }
    /// A bar built from preconfigured button models.
    convenience init(buttons: [ButtonViewButton], action: ((_ button: UIButton) -> ())? = nil) {
        self.init(titles: nil, buttons: buttons, type: .button, action: action)
    }
}
|
33e36a57c2d66c233303f23cb077505adc33eb24
|
[
"Swift",
"Markdown"
] | 35 |
Swift
|
jacobcaraballo/prime-planner
|
66bfbbf68e4aedc6c59b7b45dd565806cdb125a7
|
56f7c9933eaac05fdf13d4918ad029f5fa0080f4
|
refs/heads/master
|
<repo_name>naassi/TaxiLog<file_sep>/README.md
# TaxiLog
I am currently working as a taxi driver while learning to code, and I have built this very basic command-line program using Node.js to keep track of my jobs. I am going to build more functions to work with the data; I also think I should use a Date object on every log rather than a string.
<file_sep>/app.js
const colors = require('colors');
const prompt = require('prompt');
const fs = require('fs');
prompt.message = ('-->'.yellow);
// Represents a single taxi job (fare) recorded for a given day.
var Job = function(date, start, dest, price, type) {
	Object.assign(this, { date: date, start: start, dest: dest, price: price, type: type });
};
// Represents a single business expense (e.g. fuel) for a given day.
var Expense = function(date, item, cost) {
	var fields = { date: date, item: item, cost: cost };
	for (var key in fields) {
		this[key] = fields[key];
	}
}
// Groups the jobs and expenses recorded for a single date.
var Log = function(date, job) {
	Object.assign(this, { date: date, jobs: [job], dailyExpenses: [] });
};
// Prompts the user for a single expense (date, item, cost), appends it to
// the log file via addToLog, then offers to enter another expense.
var addExpense = function() {
	console.log('Enter expense info'.green);
	prompt.start();
	prompt.get(['date', 'item', 'cost'],
		function(err, results) {
			// Bail out on prompt errors (e.g. the user cancelled): `results`
			// is unusable then. The original logged the error but fell
			// through and crashed dereferencing `results.date`.
			if (err) { return console.log(err.message); }
			var expense = new Expense(results.date, results.item, results.cost);
			expense = JSON.stringify(expense);
			addToLog('expense', expense);
			console.log('Would you like to enter another expense?'.green);
			prompt.get(['(y/n)'], function(err, results) {
				if (err) { console.log(err.message); }
				else if (results['(y/n)'] === 'y') {
					return addExpense();
				}
				else { return console.log('exiting...'.green); }
			});
		});
};
// Prompts the user for a single job (date, start, dest, price, type),
// appends it to the log file via addToLog, then offers to enter another job.
var addJob = function() {
	console.log('Enter job info'.green);
	prompt.start();
	prompt.get(['date', 'start', 'dest', 'price', 'type'],
		function(err, results) {
			// Bail out on prompt errors (e.g. the user cancelled): `results`
			// is unusable then. The original logged the error but fell
			// through and crashed dereferencing `results.date`.
			if (err) { return console.log(err.message); }
			var job = new Job(results.date, results.start, results.dest, results.price, results.type);
			job = JSON.stringify(job);
			addToLog('job', job);
			console.log('Would you like to enter another job?'.green);
			prompt.get(['(y/n)'], function(err, results) {
				if (err) { console.log(err.message); }
				else if (results['(y/n)'] === 'y') {
					return addJob();
				}
				else { return console.log('exiting...'.green); }
			});
		});
};
// Persists a serialized job or expense entry into ./log.json, grouping
// entries under the day-log matching the entry's date.
//   type  - 'job' or 'expense'
//   entry - JSON string produced from a Job or Expense instance
var addToLog = function(type, entry) {
	entry = JSON.parse(entry);
	var data = JSON.parse(fs.readFileSync('./log.json', 'utf-8'));
	var handled = false;
	for (var i = 0; i < data.length; i++) {
		var log = data[i];
		if (log.date === entry.date) {
			if (type === 'job') {
				log.jobs.push(entry);
			} else if (type === 'expense') {
				log.dailyExpenses.push(entry);
			}
			// dates are unique per log, so stop scanning once matched
			// (the original kept looping after pushing an expense)
			handled = true;
			break;
		}
	}
	if (!handled) {
		if (type === 'job') {
			data.push(new Log(entry.date, entry));
		} else if (type === 'expense') {
			// Bug fix: expenses for a date with no existing day-log were
			// silently dropped. Create a day-log with no jobs instead.
			var dayLog = new Log(entry.date, entry);
			dayLog.jobs = [];
			dayLog.dailyExpenses = [entry];
			data.push(dayLog);
		}
	}
	fs.writeFileSync('./log.json', JSON.stringify(data));
}
// Prompts for a date, then prints every job and expense logged for that
// date along with cash/account totals and the total expenses.
var listJobs = function() {
	console.log('Enter the date you would like to search for' + '(eg: dd/mm/yy)'.green);
	prompt.start();
	prompt.get(['date'], function(err, results){
		var date = results.date;
		var data = fs.readFileSync('./log.json', 'utf-8');
		data = JSON.parse(data);
		for(var i = 0; i < data.length; i ++) {
			if(data[i].date === date) {
				var totalCash = 0;
				var totalAccount = 0;
				console.log(colors.yellow('\n\nJobs for ' + results.date));
				console.log('-------------------'.yellow);
				for(var item in data[i].jobs) {
					var current = data[i].jobs[item];
					// tally cash and account fares separately
					if(current.type === 'cash') {
						totalCash += Number(current.price);
					}
					if(current.type === 'account') {
						totalAccount += Number(current.price);
					}
					console.log(current.start + ' to ' + current.dest + ' : £' + current.price + ' ' + current.type);
				}
				console.log(colors.green('The total for the day is £' + totalCash + ' cash and £' + totalAccount + ' account.\n\n'));
				// expenses recorded for the same day
				var totalExpenses = 0;
				for(var prop in data[i].dailyExpenses) {
					var expense = data[i].dailyExpenses[prop];
					totalExpenses += Number(expense.cost);
					console.log(expense.item + ' £' + expense.cost);
				}
				console.log(colors.red('Total expenses: £' + totalExpenses));
			}
		}
	});
}
// Top-level menu: reads a single letter and dispatches to the matching
// action. Any unrecognized input simply falls through and exits.
var run = function() {
	console.log('\n\n\n----Taxi Log----\n'.yellow+
	'What would you like to do?\n'+
	' a: to addJob\n' +
	' l: to listJobs\n' +
	' x: to addExpense\n' +
	' e: to exit');
	prompt.start();
	prompt.get(['answer'], function(err, results) {
		if(err) { console.log(err.message) }
		else if(results.answer === 'a') { return addJob(); }
		else if(results.answer ==='l') { return listJobs();}
		else if(results.answer === 'x') { return addExpense();}
		else if(results.answer === 'e') { return;}
	});
}
var backup = function() {
var data = fs.readFileSync('./log.json');
}
run();
|
befabdabff0243b3ad66a555402c23778e066a87
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
naassi/TaxiLog
|
10812b9db6e2bc7a8aab79eca10618d43b2bb693
|
0a8603ae7425c3ea7a57649d4f29d7c5a02b770c
|
refs/heads/master
|
<file_sep>FROM node:10
WORKDIR /usr/src/app
ENV PORT 8080
ENV HOST 0.0.0.0
COPY package*.json ./
RUN npm install
COPY . .
CMD npm start<file_sep>const controllers = require('./controllers')
// Registers the mailing service's HTTP routes on the given Express server.
module.exports = (server) => {
	// root: service/about info
	server.route('/')
		.get(controllers.about)
	// Mailchimp newsletter subscription
	server.route('/subscribe')
		.post(controllers.subscribe)
	// send a transactional email
	server.route('/sendmail')
		.post(controllers.sendMail)
}<file_sep>const express = require('express')
const cors = require('cors')
const PORT = process.env.PORT || 5000
const Mailchimp = require('mailchimp-api-v3')
require('dotenv').config()
// environment variables (loaded from .env by dotenv above)
// NOTE(review): these two constants are unused in this file — presumably
// consumed elsewhere (e.g. the controllers); confirm before removing.
const mailchimpAPIKey = process.env.MAILCHIMP_API_KEY
const audienceID = process.env.AUDIENCE_ID
// initialize the Express server
const server = express()
// allow cross-origin requests
server.use(cors())
// parse JSON and urlencoded request bodies
server.use(express.json())
server.use(express.urlencoded())
// register the API routes
const routes = require('./api/routes')
routes(server)
// start listening on the configured port
server.listen(PORT, () => {
	console.log(`Mailing micro-service is now active on port ${PORT}`)
})
|
fc766425ca46d07ee4bb15c0f16f3d525ef03652
|
[
"JavaScript",
"Dockerfile"
] | 3 |
Dockerfile
|
DeFipedia/defipedia-mailing-service
|
7417f4bab9e9a7c2a2043e8472e34860a8b5e64e
|
e4058e4586d8630ecc6dcb1290604b45d8178526
|
refs/heads/master
|
<repo_name>lussierc/africancountryinfocenter.netlify.app<file_sep>/_data/blog/niger.md
---
template: BlogPost
path: /niger
date: 2020-11-23T07:08:53.137Z
title: Niger
thumbnail: 'https://cdn.britannica.com/54/144054-050-70F4BA48/Abuja-Nigeria.jpg'
metaDescription: Country Page Post
---
# General Information about Niger
- Name: Niger
- African Abbreviation: NG
- Capital City: Niamey
- Population: 24,206,644
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 12,928,145.12
- GDP Growth (annual %) in 2019: 5.8
- GDP Per Capita: 554.6
## A Brief Overview of Niger
Niger is one of the hottest countries in the world and, until its independence in 1960, was part of French West Africa. The Hausa are the dominant group in the country. There are five national languages in addition to French, the official language.
#### References:
- [Capital Image](https://cdn.britannica.com/54/144054-050-70F4BA48/Abuja-Nigeria.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.everyculture.com/Ma-Ni/Niger.html)
<file_sep>/_data/blog/seychelles.md
---
template: BlogPost
path: /seychelles
date: 2020-11-23T07:08:53.137Z
title: Seychelles
thumbnail: 'https://www.worldatlas.com/upload/95/35/b0/victoria-seychelles-little-big-ben-clock-tower.jpg'
metaDescription: Country Page Post
---
# General Information about Seychelles
- Name: Seychelles
- African Abbreviation: SY
- Capital City: Victoria
- Population: 98,347
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 1,698,843.06
- GDP Growth (annual %) in 2019: 4.7
- GDP Per Capita: 17401.7
## A Brief Overview of Seychelles
The country features beautiful beaches as an island nation. The art and culture of the country have African, Asian, and European influences. There are many religions like Christianity, Islam, and Hinduism. Creole is spoken throughout the country while English/French are the languages used by the government. Seychellois cuisine, in which rice, curry, and coconut milk are staple foods, also reflects these multiple origins.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/95/35/b0/victoria-seychelles-little-big-ben-clock-tower.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.villanovo.com/guides/seychelles/culture-traditions)
<file_sep>/_data/blog/lesotho.md
---
template: BlogPost
path: /lesotho
date: 2020-11-23T07:08:53.137Z
title: Lesotho
thumbnail: 'https://cdn.britannica.com/18/195818-050-5E2E93BA/Maseru-Lesotho.jpg'
metaDescription: Country Page Post
---
# General Information about Lesotho
- Name: Lesotho
- African Abbreviation: LE
- Capital City: Maseru
- Population: 2,142,249
- African Subregion: Southern Africa
### Country Flag

## Economic Information
- GDP (thousands): 2,460,072.44
- GDP Growth (annual %) in 2019: 1.5
- GDP Per Capita: 1157.5
## A Brief Overview of Lesotho
Traditional authority in the country is still exercised through a system of chieftaincy, which extends from the king through the chiefs at the village level. Despite increasing urbanization and the growth of modern institutions, many Sotho are still interested in building a rural homestead and remaining loyal to the chieftaincy system.
Urban life is a blend of traditional and Western culture. Many buildings in the country were burned during looting following the election in 1998, but there are many modern and Western hotels, nightclubs, and more. Village life on the other hand, is dominated by basic agricultural tasks. Lesotho observes most Christian holidays.
#### References:
- [Capital Image](https://cdn.britannica.com/18/195818-050-5E2E93BA/Maseru-Lesotho.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.britannica.com/place/Lesotho/Cultural-life)
<file_sep>/_data/blog/namibia.md
---
template: BlogPost
path: /namibia
date: 2020-11-23T07:08:53.137Z
title: Namibia
thumbnail: 'https://lh3.googleusercontent.com/proxy/pNL5ngQDOvvcdwT-xDbSNOtDzl-FUYOr2hgiFOc19kqGnea7loWvfn-X_0trnbvOm7Af4W7n3LuaVn-24vIk-04cY0IhnFpAVKwoeBF0ftLaBr7L90Y'
metaDescription: Country Page Post
---
# General Information about Namibia
- Name: Namibia
- African Abbreviation: NM
- Capital City: Windhoek
- Population: 2,540,905
- African Subregion: Southern Africa
### Country Flag

## Economic Information
- GDP (thousands): 12,366,527.72
- GDP Growth (annual %) in 2019: -1.1
- GDP Per Capita: 4957.5
## A Brief Overview of Namibia
Namibia is a sparsely populated country. German is a widely spoken language today and the culture and cuisine of the country have German influences. There are many different cultures spread throughout the country.
#### References:
- [Capital Image](https://lh3.googleusercontent.com/proxy/pNL5ngQDOvvcdwT-xDbSNOtDzl-FUYOr2hgiFOc19kqGnea7loWvfn-X_0trnbvOm7Af4W7n3LuaVn-24vIk-04cY0IhnFpAVKwoeBF0ftLaBr7L90Y)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.rhinoafrica.com/en/destinations/namibia/facts-and-information/culture/77461)
<file_sep>/_data/blog/guinea-bissau.md
---
template: BlogPost
path: /guinea-bissau
date: 2020-11-23T07:08:53.137Z
title: Guinea-Bissau
thumbnail: 'https://cdn.britannica.com/93/154493-050-142F8CF0/Bissau-Guinea-Bissau.jpg'
metaDescription: Country Page Post
---
# General Information about Guinea-Bissau
- Name: Guinea-Bissau
- African Abbreviation: RG
- Capital City: Bissau
- Population: 1,968,001
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 1,340,389.41
- GDP Growth (annual %) in 2019: 4.6
- GDP Per Capita: 697.8
## A Brief Overview of Guinea-Bissau
The culture is very colorful thanks to the people's diverse ethnic backgrounds. The population is made up of different tribes with distinct languages, social structures, and customs. Prominent groups include Fula, Mandinka, Balanta, Papel, and more.
Music is a big part of life, while this tradition is connected to polyrhythmic Gumbe genres. The most common instrument is the calabash. Most of the population adhered to animist beliefs even during colonization. Today, the major practice is Islam, followed by Christianity.
#### References:
- [Capital Image](https://cdn.britannica.com/93/154493-050-142F8CF0/Bissau-Guinea-Bissau.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/guinea-bissau/history-and-culture)
<file_sep>/_data/blog/mali.md
---
template: BlogPost
path: /mali
date: 2020-11-23T07:08:53.137Z
title: Mali
thumbnail: 'https://www.worldatlas.com/upload/da/c8/e6/bamako-mali-capital-city.jpg'
metaDescription: Country Page Post
---
# General Information about Mali
- Name: Mali
- African Abbreviation: ML
- Capital City: Bamako
- Population: 20,250,833
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 17,510,141.17
- GDP Growth (annual %) in 2019: 5
- GDP Per Capita: 890.7
## A Brief Overview of Mali
Mali is one of the world's poorest countries today, but was once one of Africa's mightiest empires. The nomadic desert lifestyle of Northern tribes has remained relatively unchanged for centuries. The diverse cultures of the country peacefully interact with each other. Each of Mali's own ethnic groups have their own languages.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/da/c8/e6/bamako-mali-capital-city.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/mali/history-and-culture)
<file_sep>/_data/blog/djibouti.md
---
template: BlogPost
path: /djibouti
date: 2020-11-23T07:08:53.137Z
title: Djibouti
thumbnail: 'https://www.worldatlas.com/upload/d5/71/52/shutterstock-543241918.jpg'
metaDescription: Country Page Post
---
# General Information about Djibouti
- Name: Djibouti
- African Abbreviation: RD
- Capital City: Djibouti
- Population: 988,000
- African Subregion: Eastern Africa
### Country Flag

## A Brief Overview of Djibouti
The country is a crossroads of trade and commerce, while being heavily influenced by its ethnic clusters. The two dominant groups are the Afar and the Issa-Somali. The Issa Somali make up 60% of the population and are largely Muslim. Ethnic traditions still live in the country.
The country was colonized by the French in 1862. After the two groups began to rebel against colonial rule, the French left in 1977, granting the country independence. The country has enjoyed an era of peace since 2000.
There are many different traditions in the country, both modern and traditional. The official languages of the country are Arabic and French, though the multi-ethnic populations speak Somali and Afar. Religion, notably Islam, is still a main element of Djibouti culture. Music is an important part of everyday life, while folklore is still prevalent, and the locals dress to match the hot climate they reside in.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/d5/71/52/shutterstock-543241918.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/djibouti/history-and-culture)
<file_sep>/_data/blog/rwanda.md
---
template: BlogPost
path: /rwanda
date: 2020-11-23T07:08:53.137Z
title: Rwanda
thumbnail: 'https://www.explorerwandatours.com/wp-content/uploads/2019/08/Kigali-City-750x450.jpg'
metaDescription: Country Page Post
---
# General Information about Rwanda
- Name: Rwanda
- African Abbreviation: RW
- Capital City: Kigali
- Population: 12,952,218
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 10,122,472.59
- GDP Growth (annual %) in 2019: 9.4
- GDP Per Capita: 801.7
## A Brief Overview of Rwanda
Rwanda is a country of mainly three groups of people these are the Hutu, Tutsi, and the Twa. Music and dance are an essential part of ceremonies, celebrations, and social gatherings.
#### References:
- [Capital Image](https://www.explorerwandatours.com/wp-content/uploads/2019/08/Kigali-City-750x450.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.aboutrwanda.com/rwandas-culture-and-people/)
<file_sep>/_data/blog/mozambique.md
---
template: BlogPost
path: /mozambique
date: 2020-11-23T07:08:53.137Z
title: Mozambique
thumbnail: 'https://www.worldatlas.com/upload/a5/44/e2/capital-of-mozambique.jpg'
metaDescription: Country Page Post
---
# General Information about Mozambique
- Name: Mozambique
- African Abbreviation: MZ
- Capital City: Maputo
- Population: 31,255,435
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 14,934,159.93
- GDP Growth (annual %) in 2019: 2.2
- GDP Per Capita: 491.8
## A Brief Overview of Mozambique
Portuguese is the most widely spoken language in Mozambique and is used for business, education and state business. Many indigenous languages are also spoken. The cultures and traditions of Islam, Swahili and Bantu speakers co-exist harmoniously in the country. Soccer is the country's favorite activity.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/a5/44/e2/capital-of-mozambique.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](http://www.mozambique.co.za/About_Mozambique-travel/mozambique-culture.html)
<file_sep>/_data/blog/botswana.md
---
template: BlogPost
path: /botswana
date: 2020-11-23T07:08:53.137Z
title: Botswana
thumbnail: 'https://i.pinimg.com/736x/e2/8d/cb/e28dcbadce1e48e6e39bdb67e4b26697.jpg'
metaDescription: Country Page Post
---
# General Information about Botswana
- Name: Botswana
- African Abbreviation: BT
- Capital City: Gaborone
- Population: 2,351,627
- African Subregion: Southern Africa
### Country Flag

## Economic Information
- GDP (thousands): 18,340,510.79
- GDP Growth (annual %) in 2019: 3
- GDP Per Capita: 7691.3
#### References:
- [Capital Image](https://i.pinimg.com/736x/e2/8d/cb/e28dcbadce1e48e6e39bdb67e4b26697.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/mauritius.md
---
template: BlogPost
path: /mauritius
date: 2020-11-23T07:08:53.137Z
title: Mauritius
thumbnail: 'https://www.worldatlas.com/upload/82/fa/66/shutterstock-513464896.jpg'
metaDescription: Country Page Post
---
# General Information about Mauritius
- Name: Mauritius
- African Abbreviation: MA
- Capital City: Port Louis
- Population: 1,271,768
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 14,180,444.56
- GDP Growth (annual %) in 2019: 3.6
- GDP Per Capita: 11203.5
## A Brief Overview of Mauritius
Music plays a big part in Mauritian culture and the most listened to styles are Séga, Reggae and Seggae. There are also many famous writers stemming from the country. The most famous local craft is woodcraft. There is no official religion in the country.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/82/fa/66/shutterstock-513464896.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.villanovo.com/guides/mauritius/culture-traditions)
<file_sep>/_data/blog/gambia.md
---
template: BlogPost
path: /gambia
date: 2020-11-23T07:08:53.137Z
title: Gambia
thumbnail: 'https://www.worldatlas.com/upload/c2/e0/a1/shutterstock-545611225-1.jpg'
metaDescription: Country Page Post
---
# General Information about Gambia
- Name: Gambia
- African Abbreviation: GA
- Capital City: Banjul
- Population: 2,416,668
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 1,763,819.05
- GDP Growth (annual %) in 2019: 6
- GDP Per Capita: 751.3
## A Brief Overview of Gambia
Most Gambians are bilingual and can speak English, as the country is a former British colony. Life is taken at a relaxed pace and the people are friendly.
If invited to a Gambian home it is polite to bring a small gift. You may also try local Gambian dishes like Benachin (rice and vegetables) or Domoda (stewed meat). The main tribes in the country are the Mandinka, Wolof, Fula, and Jola, each of which have their own languages and traditions.
The country is predominantly Muslim but there is no strict adherence to Sharia Law. "This is a country that remains devout in its faith but wears its religion lightly."
#### References:
- [Capital Image](https://www.worldatlas.com/upload/c2/e0/a1/shutterstock-545611225-1.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.gambia.co.uk/guide/culture-language-and-religion)
<file_sep>/_data/blog/congo.md
---
template: BlogPost
path: /congo
date: 2020-11-23T07:08:53.137Z
title: Congo
thumbnail: 'https://media.cntraveler.com/photos/5a0089f69068b525708f1f4a/16:9/w_4767,h_2681,c_limit/Maphead_Kinshasa--Brazzaville_GettyImages-540026844.jpg'
metaDescription: Country Page Post
---
# General Information about Congo
- Name: Congo
- African Abbreviation: RC
- Capital City: Brazzaville
- Population: 5,518,087
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 10,820,591.13
- GDP Growth (annual %) in 2019: -0.9
- GDP Per Capita: 2011.1
## A Brief Overview of Congo
Located just across the river from the capital city of the Democratic Republic of the Congo, Kinshasa, sits the capital city of the Republic of Congo, Brazzaville.
Christianity and colonialism had a huge impact on the art forms and culture of the country. Precolonial arts emphasized ceremonial music, dance, sculpture, and oral literature. In the 1980s, Brazzaville became a major center for the production of contemporary African music, also known as Congolese music or rumba, which mixes traditional African rhythms and instruments. This music is popular across the continent.
Congolese people observe the major holidays that Christians do. Soccer is a popular sport in Congo and is widely played and watched.
#### References:
- [Capital Image](https://media.cntraveler.com/photos/5a0089f69068b525708f1f4a/16:9/w_4767,h_2681,c_limit/Maphead_Kinshasa--Brazzaville_GettyImages-540026844.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.britannica.com/place/Republic-of-the-Congo/Cultural-life)
<file_sep>/_data/blog/morocco.md
---
template: BlogPost
path: /morocco
date: 2020-11-23T07:08:53.137Z
title: Morocco
thumbnail: 'https://cdn.britannica.com/25/100025-050-B2CE03DD/Mouth-medina-Wadi-Bou-Regreg-Morocco-Rabat.jpg'
metaDescription: Country Page Post
---
# General Information about Morocco
- Name: Morocco
- African Abbreviation: MR
- Capital City: Rabat
- Population: 36,910,560
- African Subregion: Northern Africa
### Country Flag

## A Brief Overview of Morocco
The culture of Morocco is a blend of ethnic tradition and religion, reflecting the Berber, African, Arabs, and Jewish influence. The official languages are Berber and Arabic while French is also widely spoken. Islam is the major religion in the country. The people insist on unity and take care of their parents when they are old. Moroccan food is usually well cooked and contains many spices. Moroccan art has been influenced by other cultures and nations.
#### References:
- [Capital Image](https://cdn.britannica.com/25/100025-050-B2CE03DD/Mouth-medina-Wadi-Bou-Regreg-Morocco-Rabat.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.worldatlas.com/articles/morocco-culture-customs-and-traditions.html)
<file_sep>/_data/blog/somalia.md
---
template: BlogPost
path: /somalia
date: 2020-11-23T07:08:53.137Z
title: Somalia
thumbnail: 'https://i.pinimg.com/originals/ee/51/57/ee51576ec1220c0cc7669de6fccf9bd7.jpg'
metaDescription: Country Page Post
---
# General Information about Somalia
- Name: Somalia
- African Abbreviation: SO
- Capital City: Mogadishu
- Population: 15,893,222
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 917,044.23
- GDP Growth (annual %) in 2019: -1.5
- GDP Per Capita: 126.9
## A Brief Overview of Somalia
Unlike many African nations, Somalia is composed of a single, homogeneous ethnic group. Although Somalis may differ in nuances of local lifestyle, they share a uniform language, religion, and culture, and trace their heritage to a common ancestor. The universal language in Somalia is Somali.
#### References:
- [Capital Image](https://i.pinimg.com/originals/ee/51/57/ee51576ec1220c0cc7669de6fccf9bd7.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://ethnomed.org/culture/somali/)
<file_sep>/_data/blog/senegal.md
---
template: BlogPost
path: /senegal
date: 2020-11-23T07:08:53.137Z
title: Senegal
thumbnail: 'https://www.worldatlas.com/upload/d2/56/84/dakar-senegal.jpg'
metaDescription: Country Page Post
---
# General Information about Senegal
- Name: Senegal
- African Abbreviation: SN
- Capital City: Dakar
- Population: 16,743,927
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 23,578,084.05
- GDP Growth (annual %) in 2019: 5.3
- GDP Per Capita: 1446.8
## A Brief Overview of Senegal
French is the official language and most of the population practices Islam. Senegalese music is on the cutting edge of modern mixed with traditional drums, and the country’s people love to dance. It is well known in the cinema and literature worlds for its production of quality work.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/d2/56/84/dakar-senegal.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information]()
<file_sep>/src/pages/about.js
import React from "react"
import Helmet from "react-helmet"
import { graphql } from 'gatsby'
import Layout from "../components/layout"
const AboutPage = ({
data: {
site
},
}) => {
return (
<Layout>
<Helmet>
<title>About — {site.siteMetadata.title}</title>
<meta name="description" content={"About page of " + site.siteMetadata.description} />
</Helmet>
<div className="two-grids -contact">
<div className="post-thumbnail" style={{backgroundImage: `url('https://iea.imgix.net/5f8a3b57-74e5-423b-8d00-86afeeef7b3a/GettyImages-175566839.jpg?auto=compress%2Cformat&fit=min&q=80&rect=496%2C4491%2C3618%2C2402&w=1280&h=850&fit=crop&fm=jpg&q=70&auto=format')`, marginBottom: 0}}>
<h1 className="post-title">About This Website</h1>
<p>Learn more about our website →</p>
</div>
<div>
<p>This website contains a page for each and every African country. Each page contains information about the county such as it's abbreviation, population, capital city, and more. Each page also contains an image of the country flag and of it's capital city. Most pages also contain a brief writeup that talks about the country's history, culture, and more!</p>
<p>This website was created to shine light on all African countries, giving users of the website a broad overview of what is going on within them. The website creator has been investigating African countries for years through coursework and wanted to create a simple website showcasing them.</p>
<p>Visitors of the website with questions, concerns, or ideas should use the Contact page to submit an email.</p>
</div>
</div>
</Layout>
)
}
export default AboutPage
// Gatsby page query: supplies site title and description to AboutPage via the `data` prop.
// The template literal is GraphQL source consumed at build time — do not edit its text casually.
export const pageQuery = graphql`
query AboutPageQuery{
site {
siteMetadata {
title
description
}
}
}
`
<file_sep>/_data/blog/chad.md
---
template: BlogPost
path: /chad
date: 2020-11-23T07:08:53.137Z
title: Chad
thumbnail: 'https://i.pinimg.com/originals/a9/87/d2/a987d2ef7d6d066d470182cf4d76607f.png'
metaDescription: Country Page Post
---
# General Information about Chad
- Name: Chad
- African Abbreviation: TD
- Capital City: N'Djamena
- Population: 16,425,864
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 11,314,951.34
- GDP Growth (annual %) in 2019: 3.2
- GDP Per Capita: 709.5
## An Overview of Chad
Ruled by the colonial French in their early days has had a large impact on Chad society. Despite this, 200 ethnic groups still live in the country that live autonomously. Each ethnic group has its own literary tradition while most written materials in the cities are French. Dress is informal, but conservative to show respect for Muslim laws and ideals. The economy of Chad is still growing and the large oil deposits in the South of the country are helping it.
#### References:
- [Capital Image](https://i.pinimg.com/originals/a9/87/d2/a987d2ef7d6d066d470182cf4d76607f.png)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/chad/history-and-culture)
<file_sep>/_data/blog/ethiopia.md
---
template: BlogPost
path: /ethiopia
date: 2020-11-23T07:08:53.137Z
title: Ethiopia
thumbnail: 'https://im-media.voltron.voanews.com/Drupal/01live-166/styles/sourced/s3/2019-04/8A77E9A0-6A9D-4C81-9775-1C82A7099CC1.jpg?itok=KusGy8-8'
metaDescription: Country Page Post
---
# General Information about Ethiopia
- Name: Ethiopia
- African Abbreviation: ET
- Capital City: Addis Ababa (Addis Abeba)
- Population: 114,963,588
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 96,107,662.40
- GDP Growth (annual %) in 2019: 8.3
- GDP Per Capita: 857.5
## A Brief Overview of Ethiopia
Ethiopia has 84 indigenous languages, while English is the most widely spoken foreign language. Local languages are commonly used in schools.
After the fall of the Derg regime in 1991, the constitution granted all ethnic groups the right to develop their languages and to establish a mother tongue that is primary for their education systems. With this, Ethiopia is a multi-cultural and multi-ethnic country. Religion is quite important with the Ethiopian Orthodox Church and Muslim being the two largest groups.
The extended family is the focus of the social system. Individuals achieve recognition or social standing through their extended family.
#### References:
- [Capital Image](https://im-media.voltron.voanews.com/Drupal/01live-166/styles/sourced/s3/2019-04/8A77E9A0-6A9D-4C81-9775-1C82A7099CC1.jpg?itok=KusGy8-8)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.commisceo-global.com/resources/country-guides/ethiopia-guide)
<file_sep>/_data/blog/nigeria.md
---
template: BlogPost
path: /nigeria
date: 2020-11-23T07:08:53.137Z
title: Nigeria
thumbnail: 'https://img.etimg.com/thumb/msid-69183882,width-640,resizemode-4,imgsize-1092250/nigeria-lagos-to-abuja.jpg'
metaDescription: Country Page Post
---
# General Information about Nigeria
- Name: Nigeria
- African Abbreviation: NI
- Capital City: Abuja
- Population: 206,139,589
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 448,120,428.86
- GDP Growth (annual %) in 2019: 2.2
- GDP Per Capita: 2229.2
## A Brief Overview of Nigeria
Nigeria is a multinational state inhabited by more than 250 ethnic groups with over 500 distinct languages. The official language of the country is English. The main religions are Christianity and Muslim.
#### References:
- [Capital Image](https://img.etimg.com/thumb/msid-69183882,width-640,resizemode-4,imgsize-1092250/nigeria-lagos-to-abuja.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://artsandculture.google.com/entity/nigeria/m05cgv?hl=en)
<file_sep>/_data/blog/mauritania.md
---
template: BlogPost
path: /mauritania
date: 2020-11-23T07:08:53.137Z
title: Mauritania
thumbnail: 'https://www.worldatlas.com/upload/49/30/78/shutterstock-651522553.jpg'
metaDescription: Country Page Post
---
# General Information about Mauritania
- Name: Mauritania
- African Abbreviation: MU
- Capital City: Nouakchott
- Population: 4,649,658
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 7,593,752.45
- GDP Growth (annual %) in 2019: 5.9
- GDP Per Capita: 1677.9
## A Brief Overview of Mauritania
The history of the country is one characterized by conflict. Today, the country is making strides towards a stable democracy. The country's culture is a mix of many influences, both indigenous and external. Mauritanian music is a tradition carried down by the Moors and their traditional instruments are still seen today.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/49/30/78/shutterstock-651522553.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/mauritania/history-and-culture)
<file_sep>/_data/blog/uganda.md
---
template: BlogPost
path: /uganda
date: 2020-11-23T07:08:53.137Z
title: Uganda
thumbnail: 'https://www.murchisonfallsparkuganda.com/wp-content/uploads/2019/06/Kampala-Capital-City-750x450.jpg'
metaDescription: Country Page Post
---
# General Information about Uganda
- Name: Uganda
- African Abbreviation: UG
- Capital City: Kampala
- Population: 45,741,007
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 34,387,229.49
- GDP Growth (annual %) in 2019: 6.5
- GDP Per Capita: 776.8
## A Brief Overview of Uganda
Luganda is the most commonly spoken language while Christians make up 85% of the country's population. The sport of the country is soccer. Most of their food has starch as a base, with sauces of beans or meat to add flavour to it.
#### References:
- [Capital Image](https://www.murchisonfallsparkuganda.com/wp-content/uploads/2019/06/Kampala-Capital-City-750x450.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.africanoverlandtours.com/overland-africa-blog/the-culture-of-uganda/)
<file_sep>/_data/blog/libya.md
---
template: BlogPost
path: /libya
date: 2020-11-23T07:08:53.137Z
title: Libya
thumbnail: 'https://www.worldatlas.com/upload/8a/51/8d/tripoli-libya-capital-city.jpg'
metaDescription: Country Page Post
---
# General Information about Libya
- Name: Libya
- African Abbreviation: LB
- Capital City: Tripoli
- Population: 6,871,292
- African Subregion: Northern Africa
### Country Flag

## A Brief Overview of Libya
Arabic is the main language spoken in the country, while Tamazight is also spoken by Libyan Berbers. Most Libyans consider themselves Arab, and about 20% of the population is made up of foreign workers. As Arabs, most Libyans are Muslim. In compliance with Islam Law, alcohol was banned. Today it is increasingly common for young couples to set up their own homes.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/8a/51/8d/tripoli-libya-capital-city.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.commisceo-global.com/resources/country-guides/libya-guide)
<file_sep>/_data/blog/kenya.md
---
template: BlogPost
path: /kenya
date: 2020-11-23T07:08:53.137Z
title: Kenya
thumbnail: 'https://i.pinimg.com/originals/e1/ef/dd/e1efdd138e0410287b90244fe47e28a5.jpg'
metaDescription: Country Page Post
---
# General Information about Kenya
- Name: Kenya
- African Abbreviation: KY
- Capital City: Nairobi
- Population: 53,771,296
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 95,503,088.54
- GDP Growth (annual %) in 2019: 5.4
- GDP Per Capita: 1816.5
## A Brief Overview of Kenya
Kenya is a multilingual country, with over 62 different languages being spoken throughout the land. Swahili and English are the official languages. There are 13 primary ethnic groups residing in the country while around 13% of the country is of non-African descent.
About half of the population are Christians while there are also Muslim and Hindu minorities among others. Kenyans are group-orientated instead of being individualistic. The extended family is also the basis of social structure while Kenyans are also fairly formal at the dinner table.
#### References:
- [Capital Image](https://i.pinimg.com/originals/e1/ef/dd/e1efdd138e0410287b90244fe47e28a5.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.commisceo-global.com/resources/country-guides/kenya-guide)
<file_sep>/_data/blog/malawi.md
---
template: BlogPost
path: /malawi
date: 2020-11-23T07:08:53.137Z
title: Malawi
thumbnail: 'https://i.pinimg.com/originals/c0/12/5d/c0125dfd87ddf8ed13b626feeff771de.jpg'
metaDescription: Country Page Post
---
# General Information about Malawi
- Name: Malawi
- African Abbreviation: MW
- Capital City: Lilongwe
- Population: 19,129,952
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 7,666,704.43
- GDP Growth (annual %) in 2019: 4.4
- GDP Per Capita: 411.6
## A Brief Overview of Malawi
There are many different ethnic groups in the country. The Chichewa form the largest part of the population group. Malawi is often called the "Warm Heart of Africa" because of the warmth and friendliness of the people. Malawians typically live with their extended families in village huts.
#### References:
- [Capital Image](https://i.pinimg.com/originals/c0/12/5d/c0125dfd87ddf8ed13b626feeff771de.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.earth-cultures.com/cultures/people-of-malawi)
<file_sep>/_data/blog/liberia.md
---
template: BlogPost
path: /liberia
date: 2020-11-23T07:08:53.137Z
title: Liberia
thumbnail: 'https://cdn.britannica.com/10/210210-050-A31F0882/Monrovia-Liberia.jpg'
metaDescription: Country Page Post
---
# General Information about Liberia
- Name: Liberia
- African Abbreviation: LI
- Capital City: Monrovia
- Population: 5,057,681
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 3,070,518.10
- GDP Growth (annual %) in 2019: -2.3
- GDP Per Capita: 621.9
## A Brief Overview of Liberia
Liberia was founded by freed slaves from the American South, who brought their own culture and displaced the ancient tribes who farmed the lands. After a devastating 14-year civil war the diverse ethnicities of the country are now at peace. The two main cultures in the country are those stemming from the American South and from the traditional African tribes who lived on the land.
#### References:
- [Capital Image](https://cdn.britannica.com/10/210210-050-A31F0882/Monrovia-Liberia.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/liberia/history-and-culture)
<file_sep>/_data/blog/sl.md
---
template: BlogPost
path: /sl
date: 2020-11-23T07:08:53.137Z
title: Sierra Leone
thumbnail: 'https://www.worldatlas.com/upload/ac/7b/56/shutterstock-580538959.jpg'
metaDescription: Country Page Post
---
# General Information about Sierra Leone
- Name: Sierra Leone
- African Abbreviation: SL
- Capital City: Freetown
- Population: 7,976,983
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 3,941,474.31
- GDP Growth (annual %) in 2019: 5.5
- GDP Per Capita: 504.5
## A Brief Overview of Sierra Leone
Islam is the most widely practiced religion. There are many different ethnic groups in the country. The official language is English.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/ac/7b/56/shutterstock-580538959.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.worldtravelguide.net/guides/africa/sierra-leone/history-language-culture/)
<file_sep>/_data/blog/ghana.md
---
template: BlogPost
path: /ghana
date: 2020-11-23T07:08:53.137Z
title: Ghana
thumbnail: 'https://upload.wikimedia.org/wikipedia/commons/b/b6/A_drone_footage_of_Accra_central%2C_Ghana.jpg'
metaDescription: Country Page Post
---
# General Information about Ghana
- Name: Ghana
- African Abbreviation: GH
- Capital City: Accra
- Population: 31,072,940
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 66,983,634.22
- GDP Growth (annual %) in 2019: 6.5
- GDP Per Capita: 2202.1
## A Brief Overview of Ghana
The people of Ghana are warm, friendly, and polite. They take life at a relaxed pace. In this society, people are more important than schedules. There are 6 major ethnic groups in the country, that break up into 60 smaller ones. 52 different languages and hundreds of dialects are spoken throughout the country, while the official language is English because of the country's British colonizers.
Individual conduct is seen as having a large impact on entire families, social groups, and community. Everyone is expected to be respectful and dignified. Ghana is often described as the land of festivals, music, and traditional dances.
The government in the country today is a unitary republic consisting of a president, parliament, cabinet, council of state, and an independent judiciary.
#### References:
- [Capital Image](https://upload.wikimedia.org/wikipedia/commons/b/b6/A_drone_footage_of_Accra_central%2C_Ghana.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://blog.compassion.com/traditions-of-ghana-warrior-king/)
<file_sep>/_data/blog/burkina-faso.md
---
template: BlogPost
path: /burkina-faso
date: 2020-11-23T07:08:53.137Z
title: Burkina Faso
thumbnail: 'https://upload.wikimedia.org/wikipedia/commons/thumb/5/56/OUAGADOUGOU3.JPG/1200px-OUAGADOUGOU3.JPG'
metaDescription: Country Page Post
---
# General Information about Burkina Faso
- Name: Burkina Faso
- African Abbreviation: BF
- Capital City: Ouagadougou
- Population: 20,903,273
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 15,745,810.23
- GDP Growth (annual %) in 2019: 5.7
- GDP Per Capita: 774.8
#### References:
- [Capital Image](https://upload.wikimedia.org/wikipedia/commons/thumb/5/56/OUAGADOUGOU3.JPG/1200px-OUAGADOUGOU3.JPG)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/eritrea.md
---
template: BlogPost
path: /eritrea
date: 2020-11-23T07:08:53.137Z
title: Eritrea
thumbnail: 'https://cdn.britannica.com/19/195819-050-0201D7BF/Asmara-Eritrea.jpg'
metaDescription: Country Page Post
---
# General Information about Eritrea
- Name: Eritrea
- African Abbreviation: ER
- Capital City: Asmara (Asmera)
- Population: 3,546,421
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 2,065,001.63
- GDP Growth (annual %) in 2019: 8.7
- GDP Per Capita: 642.5
## A Brief Overview of Eritrea
Located on the Red Sea, local customs and music in the country have been heavily influenced by the country's ethnic background. Food is similar to Ethiopian cuisine. Tensions have been high between Ethiopia and Eritrea since the 1900s.
Eritrea is historically a trade center and art is expressed both thru music and crafts. Tsebhi is a speciality stew of the country while kitcha fit-fit is a staple meal, that is spiced and oiled bread served with yogurt and bebere for dip. Drumming is common in many communities. Religion is a mix though most follow the Christian or Muslim faith.
#### References:
- [Capital Image](https://cdn.britannica.com/19/195819-050-0201D7BF/Asmara-Eritrea.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/eritrea/history-and-culture)
<file_sep>/_data/blog/central-african-republic.md
---
template: BlogPost
path: /central-african-republic
date: 2020-11-23T07:08:53.137Z
title: Central African Republic
thumbnail: 'https://www.blackpast.org/wp-content/uploads/City_center_Bangui_Central_African_Republic_2014.jpg'
metaDescription: Country Page Post
---
# General Information about Central African Republic
- Name: Central African Republic
- African Abbreviation: CA
- Capital City: Bangui
- Population: 4,829,767
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 2,220,307.37
- GDP Growth (annual %) in 2019: 3
- GDP Per Capita: 467.9
#### References:
- [Capital Image](https://www.blackpast.org/wp-content/uploads/City_center_Bangui_Central_African_Republic_2014.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/madagascar.md
---
template: BlogPost
path: /madagascar
date: 2020-11-23T07:08:53.137Z
title: Madagascar
thumbnail: 'https://cdn.britannica.com/59/141359-050-59C1D78A/Antananarivo-Madagascar.jpg'
metaDescription: Country Page Post
---
# General Information about Madagascar
- Name: Madagascar
- African Abbreviation: MD
- Capital City: Antananarivo
- Population: 27,691,018
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 14,083,906.36
- GDP Growth (annual %) in 2019: 4.8
- GDP Per Capita: 522.2
## A Brief Overview of Madagascar
The Malagasy cuisine is based on rice as a staple of the diet, typically it is served in every meal. Traditional dresses are still worn in rural areas throughout the country. Modern day poets and writers promote the use of the Malagasy language while Moraingy is an indigenous hand-to-hand combat game that is popular. Christian festivals are celebrated widely throughout Madagascar.
#### References:
- [Capital Image](https://cdn.britannica.com/59/141359-050-59C1D78A/Antananarivo-Madagascar.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.worldatlas.com/articles/the-culture-of-madagascar.html)
<file_sep>/_data/blog/eswatini.md
---
template: BlogPost
path: /eswatini
date: 2020-11-23T07:08:53.137Z
title: Eswatini
thumbnail: 'https://thecommonwealth.org/sites/default/files/styles/press_release_large/public/images/hero/mbabane-city-aritcle.jpg?itok=qgJWwI9C'
metaDescription: Country Page Post
---
# General Information about Eswatini
- Name: Eswatini (formerly Swaziland)
- African Abbreviation: SZ
- Capital City: Mbabane
- Population: 1,160,164
- African Subregion: Southern Africa
### Country Flag

## Economic Information
- GDP (thousands): 4,405,405.80
- GDP Growth (annual %) in 2019: 2
- GDP Per Capita: 3837.0
## A Brief Overview of Eswatini
The country has two main cultural events. The first is Incwala which is a complex ritual of renewing the kingship and the nation, with special dances and songs for the occasion, lasting 6 days. The other is the Umhlanga, which brings the unmarried girls and young women of the country together to cut reeds to repair the windbreaks of the queen's home village for 5 days.
Common musical instruments are simple design, such as a kudu horn, a calabash, the reed flute, and more. Nowadays people listen to the radio or record/tape players.
#### References:
- [Capital Image](https://thecommonwealth.org/sites/default/files/styles/press_release_large/public/images/hero/mbabane-city-aritcle.jpg?itok=qgJWwI9C)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.britannica.com/place/Eswatini/Cultural-life)
<file_sep>/_data/blog/cabo-verde.md
---
template: BlogPost
path: /cabo-verde
date: 2020-11-23T07:08:53.137Z
title: Cape Verde
thumbnail: 'https://www.worldatlas.com/upload/10/48/d5/shutterstock-317019080.jpg'
metaDescription: Country Page Post
---
# General Information about Cape Verde
- Name: Cape Verde
- AKA: Cabo Verde
- African Abbreviation: CV
- Capital City: Praia
- Population: 555,987
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 1,981,845.74
- GDP Growth (annual %) in 2019: 5.7
- GDP Per Capita: 3603.8
#### References:
- [Capital Image](https://www.worldatlas.com/upload/10/48/d5/shutterstock-317019080.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/gabon.md
---
template: BlogPost
path: /gabon
date: 2020-11-23T07:08:53.137Z
title: Gabon
thumbnail: 'https://www.blackpast.org/wp-content/uploads/prodimages/files/blackpast_images/Libreville_Gabon_2012.jpg'
metaDescription: Country Page Post
---
# General Information about Gabon
- Name: Gabon
- African Abbreviation: GB
- Capital City: Libreville
- Population: 2,225,734
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 16,657,960.23
- GDP Growth (annual %) in 2019: 3.4
- GDP Per Capita: 7667.4
## A Brief Overview of Gabon
While being strongly influenced by their former French colonists, there are a number of strong cultural pillars in Gabon. Dance, songs, myths, and poetry are important elements of Gabonese life, while common art forms include the creation of masks, sculptures, and musical instruments.
Traditional instruments in Gabon include the balafon, harp, mouth bow, drums, and bells. Culture in the country is expressed through paintings, sculptures, and fashion. Original dresses made by Gabon designers are well recognized in the world of African fashion. Up to 40 indigenous languages are spoken in Gabon, but French is the official language that is used and taught in all schools.
In 1839, the French established the first long-term European settlement in the territory. Gabon remained a French territory until it declared independence in 1960.
#### References:
- [Capital Image](https://www.blackpast.org/wp-content/uploads/prodimages/files/blackpast_images/Libreville_Gabon_2012.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/gabon/history-and-culture)
<file_sep>/_data/blog/egypt.md
---
template: BlogPost
path: /egypt
date: 2020-11-23T07:08:53.137Z
title: Egypt
thumbnail: 'https://www.nationsonline.org/gallery/Egypt/Cairo-River-Nile-Egypt.jpg'
metaDescription: Country Page Post
---
# General Information about Egypt
- Name: Egypt
- African Abbreviation: EG
- Capital City: Cairo
- Population: 102,334,404
- African Subregion: Northern Africa
### Country Flag

## A Brief Overview of Egypt
Egypt is one of the earliest civilizations and is a melting pot of sorts. Tourism is one of the main sources of income in the country and Egyptians are known for their good hospitality.
Family is very important to Egyptians and children often live with their parents until they get married. It is customary to wear only black for at least 40 days after a family member passes. Egyptians also enjoy celebrations with their families and extended families.
Islam and Christianity are the main religions and both are intertwined with daily life. Mosques are on almost every street of every Egyptian city. The country follows the Western calendar but follows the Islamic calendar for Islamic religious holidays.
#### References:
- [Capital Image](https://www.nationsonline.org/gallery/Egypt/Cairo-River-Nile-Egypt.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.globalizationpartners.com/2017/01/17/traditions-and-cultures-of-egypt/)
<file_sep>/_data/blog/zambia.md
---
template: BlogPost
path: /zambia
date: 2020-11-23T07:08:53.137Z
title: Zambia
thumbnail: 'https://www.queenelizabethwildlifesafaris.com/wp-content/uploads/2020/06/lusaka.jpg'
metaDescription: Country Page Post
---
# General Information about Zambia
- Name: Zambia
- African Abbreviation: ZM
- Capital City: Lusaka
- Population: 18,383,955
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 23,064,722.45
- GDP Growth (annual %) in 2019: 1.7
- GDP Per Capita: 1291.3
## A Brief Overview of Zambia
The main exports of the country were copper, ivory, and slaves before colonization. The colonial period saw ethnically different people brought together by economic interests. With independence, institutions to protect and promote Zambia’s culture were created, including the National Heritage Conservation Commission.
#### References:
- [Capital Image](https://www.queenelizabethwildlifesafaris.com/wp-content/uploads/2020/06/lusaka.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.zambiatourism.com/about-zambia/people/culture/)
<file_sep>/_data/blog/angola.md
---
template: BlogPost
path: /angola
date: 2020-11-23T07:08:53.137Z
title: Angola
metaDescription: Country Page Post
thumbnail: 'https://cdn.britannica.com/31/195831-050-ABDCE370/Luanda-Angola.jpg'
---
# General Information about Angola
* Name: Angola
* African Abbreviation: AN
* Capital City: Luanda
* Population: 32,866,272
* African Subregion: Middle Africa
### Country Flag

## Economic Information
* GDP (thousands): 94,635,415.87
* GDP Growth (annual %) in 2019: -0.9
* GDP Per Capita: 2973.6
#### References:
* [Capital Image](https://cdn.britannica.com/31/195831-050-ABDCE370/Luanda-Angola.jpg)
* [Flags](https://github.com/hjnilsson/country-flags)
* [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
* [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
* [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
* [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/benin.md
---
template: BlogPost
path: /benin
date: 2020-11-23T07:08:53.137Z
title: Benin
thumbnail: 'https://i.pinimg.com/originals/ce/2f/7f/ce2f7f0e54be0fb741ad055973ee56e2.jpg'
metaDescription: Country Page Post
---
# General Information about Benin
- Name: Benin
- African Abbreviation: BE
- Capital City: Porto-Novo
- Population: 12,123,200
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 14,390,709.09
- GDP Growth (annual %) in 2019: 6.9
- GDP Per Capita: 1219.4
#### References:
- [Capital Image](https://i.pinimg.com/originals/ce/2f/7f/ce2f7f0e54be0fb741ad055973ee56e2.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/guinea.md
---
template: BlogPost
path: /guinea
date: 2020-11-23T07:08:53.137Z
title: Guinea
thumbnail: 'https://upload.wikimedia.org/wikipedia/commons/4/44/Conakry.jpg'
metaDescription: Country Page Post
---
# General Information about Guinea
- Name: Guinea
- African Abbreviation: GU
- Capital City: Conakry
- Population: 13,132,795
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 13,590,281.81
- GDP Growth (annual %) in 2019: 5.6
- GDP Per Capita: 1064.1
## A Brief Overview of Guinea
Guinea is a largely Muslim country with a small percentage of Christians. Local people remain true to their ethnic backgrounds, but French customs are still prevalent. Guinea declared independence from France in 1958. Power struggles plagued the country's post-independence politics, leading to instability. Lately the country has been involved in territorial struggles and quarrels over mineral wealth against neighboring Sierra Leone and Liberia.
Major ethnic groups in the country include the Fula, Maninka, and Susus. Music is a huge aspect of life and locals celebrate a lot of festivals to commemorate this prevalent art form. Folk music is still revered today.
#### References:
- [Capital Image](https://upload.wikimedia.org/wikipedia/commons/4/44/Conakry.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/guinea/history-and-culture)
<file_sep>/_data/blog/comoros.md
---
template: BlogPost
path: /comoros
date: 2020-11-23T07:08:53.137Z
title: Comoros
thumbnail: 'https://www.nationsonline.org/gallery/Comoros/Mutsamudu-Anjouan-Islands-Comoros.jpg'
metaDescription: Country Page Post
---
# General Information about Comoros
- Name: Comoros
- African Abbreviation: UC
- Capital City: Moroni
- Population: 869,601
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 1,185,728.68
- GDP Growth (annual %) in 2019: 2.7
- GDP Per Capita: 1393.5
## A Brief Overview of Comoros
A group of islands first colonized by Africans in the 8th century characterized by unrest, Comoros is now peaceful. Political unrest is still lurking in the shadows in Comoros after numerous coups from the late 1970s to 1999, when the new government was overthrown.
As they were when first colonizing the island, Comorians are still strong followers of Islam and their religious celebrations are widely observed. Overall, the local culture is a combination of Arab, French, and African ideals. The residents like music and performance arts, while local artisans specialize in things like pottery and basketry.
#### References:
- [Capital Image](https://www.nationsonline.org/gallery/Comoros/Mutsamudu-Anjouan-Islands-Comoros.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/comoros/history-and-culture)
<file_sep>/_data/blog/tanzania.md
---
template: BlogPost
path: /tanzania
date: 2020-11-23T07:08:53.137Z
title: Tanzania
thumbnail: 'https://i.pinimg.com/originals/f6/79/3d/f6793d06d0245b7d738a0df17c5c1cad.jpg'
metaDescription: Country Page Post
---
# General Information about Tanzania
- Name: Tanzania
- African Abbreviation: TA
- Capital City: Dodoma
- Population: 59,734,218
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 63,177,068.17
- GDP Growth (annual %) in 2019: 5.8
- GDP Per Capita: 1122.1
## A Brief Overview of Tanzania
There are over 120 ethnic groups that live in Tanzania with various religious beliefs. The locals are generally friendly towards visitors. Tanzanians only eat with their right hand when partaking in a meal while smelling food before eating it is seen as disrespectful.
#### References:
- [Capital Image](https://i.pinimg.com/originals/f6/79/3d/f6793d06d0245b7d738a0df17c5c1cad.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://theculturetrip.com/africa/tanzania/articles/traditions-and-customs-only-people-from-tanzania-will-understand/)
<file_sep>/_data/blog/cotedivoire.md
---
template: BlogPost
path: /cotedivoire
date: 2020-11-23T07:08:53.137Z
title: Cote d'Ivoire
thumbnail: 'https://cdn.britannica.com/28/194928-050-D09481CB/Yamoussoukro-Basilica-Cote-dIvoire.jpg'
metaDescription: Country Page Post
---
# General Information about Cote d'Ivoire
- Name: Cote d'Ivoire
- African Abbreviation: IC
- Capital City: Yamoussoukro
- Population: 26,378,274
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 58,792,205.64
- GDP Growth (annual %) in 2019: 6.9
- GDP Per Capita: 2286.2
## A Brief Overview of Cote d'Ivoire
Cote d'Ivoire, sitting on the Ivory Coast, has a colorful culture with over 60 different indigenous tribes, all with their own distinct identities and traditions. Cote d'Ivoire has enjoyed economic prosperity since its liberation from France in the 1960s, but threats to the nation's stability have emerged.
A military coup began in December 1999 which overthrew the government, while an armed rebellion in 2002 split the country in half. After peace accords and more civil wars, the country had its first peaceful vote in 2010 though unrest is still widespread.
Ivoirian cuisine takes inspiration from neighboring Western African countries, and grated cassava is one of the most popular side dishes. Music is used in all kinds of celebrations and instruments include the drum, cleavers, kpalogo, and more. The mask is the most iconic form of art in the country as the intricacy of designs convey cultural systems. There are many different religions prevalent in the country.
#### References:
- [Capital Image](https://cdn.britannica.com/28/194928-050-D09481CB/Yamoussoukro-Basilica-Cote-dIvoire.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/cote-divoire/history-and-culture)
<file_sep>/_data/blog/eqguinea.md
---
template: BlogPost
path: /eqguinea
date: 2020-11-23T07:08:53.137Z
title: Equatorial Guinea
thumbnail: 'https://www.nationsonline.org/gallery/Equatorial_Guinea/Malabo-Equatorial-Guinea.jpg'
metaDescription: Country Page Post
---
# General Information about Equatorial Guinea
- Name: Equatorial Guinea
- African Abbreviation: RE
- Capital City: Malabo
- Population: 1,402,985
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 11,026,774.95
- GDP Growth (annual %) in 2019: -5.6
- GDP Per Capita: 8131.9
## A Brief Overview of Equatorial Guinea
The country of Equatorial Guinea is a former Spanish colony and is still heavily influenced by Europe. The food is known for its variety and is heavily influenced by traditional African food. Rural dishes are based around meat and fish, while urban areas offer more Spanish-influenced options. Fish is more commonly used in modern dishes.
Traditional drinks are malamba and Osang. Palm Wine is also produced locally from various species of palm trees.
The country is known for traditional sculpture and mask making, producing some famous artists. Ballet is also something the country's residents are skilled in. Soccer is the sport of choice in the country.
#### References:
- [Capital Image](https://www.nationsonline.org/gallery/Equatorial_Guinea/Malabo-Equatorial-Guinea.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](http://www.experience-africa.de/index.php?en_equatorial-guinea_culture)
<file_sep>/_data/blog/tunisia.md
---
template: BlogPost
path: /tunisia
date: 2020-11-23T07:08:53.137Z
title: Tunisia
thumbnail: 'https://cdn.britannica.com/67/192667-050-46F5A150/Tunis-Tunisia.jpg'
metaDescription: Country Page Post
---
# General Information about Tunisia
- Name: Tunisia
- African Abbreviation:
- Capital City:
- Population:
- African Subregion:
### Country Flag

## Economic Information
- GDP (thousands):
- GDP Growth (annual %) in 2019:
- GDP Per Capita:
## A Brief Overview of Tunisia
Tunisia is 99% Muslim and religion is very important in the region. The country features cafes for just men and cafes open to both genders. The Tunisian diet is heavy in carbs and sugary foods. The three main languages spoken in the country are classic Arab, Tunis dialect, and French.
#### References:
- [Capital Image](https://cdn.britannica.com/67/192667-050-46F5A150/Tunis-Tunisia.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.alltherooms.com/blog/tunisian-culture/)
<file_sep>/_data/blog/cameroon.md
---
template: BlogPost
path: /cameroon
date: 2020-11-23T07:08:53.137Z
title: Cameroon
thumbnail: 'https://www.nationsonline.org/gallery/Cameroon/Yaounde-City.jpg'
metaDescription: Country Page Post
---
# General Information about Cameroon
- Name: Cameroon
- African Abbreviation: CR
- Capital City: Yaoundé
- Population: 26,545,863
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 38,760,467.03
- GDP Growth (annual %) in 2019: 4
- GDP Per Capita: 1497.9
#### References:
- [Capital Image](https://www.nationsonline.org/gallery/Cameroon/Yaounde-City.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/ss.md
---
template: BlogPost
path: /ss
date: 2020-11-23T07:08:53.137Z
title: South Sudan
thumbnail: 'https://cdn.britannica.com/90/150790-050-081325A0/view-Bahr-Al-Jabal-South-Sudan-Juba.jpg'
metaDescription: Country Page Post
---
# General Information about South Sudan
- Name: South Sudan
- African Abbreviation: SS
- Capital City: Juba
- Population: 11,193,725
- African Subregion: Eastern Africa
### Country Flag

## Economic Information
- GDP (thousands): 11,997,800.76
- GDP Growth (annual %) in 2019: -10.8
- GDP Per Capita: 1119.7
## A Brief Overview of South Sudan
Western style clothing is common especially in cities, but traditional dress still varies throughout the country and ethnic groups. Oral tradition is used for folklore and myths, keeping traditions alive. Food production is the single activity that absorbs the energy of the country's people. Milk, lamb, chicken, rice, and vegetables are key ingredients in foods. Civil war has destroyed much of the economy in the South.
#### References:
- [Capital Image](https://cdn.britannica.com/90/150790-050-081325A0/view-Bahr-Al-Jabal-South-Sudan-Juba.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://worldreliefmemphis.org/south-sudan-cultural-profile)
<file_sep>/_data/blog/sudan.md
---
template: BlogPost
path: /sudan
date: 2020-11-23T07:08:53.137Z
title: Sudan
thumbnail: 'https://i.guim.co.uk/img/static/sys-images/Guardian/Pix/pictures/2014/9/17/1410949924529/3bcaf5f6-2542-4046-a885-2be5d6499619-2060x1236.jpeg?width=700&quality=85&auto=format&fit=max&s=98ae1a7400dde0f6afef967049678cb4'
metaDescription: Country Page Post
---
# General Information about Sudan
- Name: Sudan
- African Abbreviation: SU
- Capital City: Khartoum
- Population: 43,849,260
- African Subregion: Northern Africa
### Country Flag

## Economic Information
- GDP (thousands): 18,902,284.48
- GDP Growth (annual %) in 2019: -2.6
- GDP Per Capita: 441.5
## A Brief Overview of Sudan
The North is primarily Arab Muslim while the South is largely black African. There is strong discontent between the two groups and the southern groups have united based on their common dislike for the northern Arabs.
The day usually begins with a cup of tea and Millet is a staple food of the country. Cooking is done in courtyards outside the house on a tin grill called a kanoon.
#### References:
- [Capital Image](https://i.guim.co.uk/img/static/sys-images/Guardian/Pix/pictures/2014/9/17/1410949924529/3bcaf5f6-2542-4046-a885-2be5d6499619-2060x1236.jpeg?width=700&quality=85&auto=format&fit=max&s=98ae1a7400dde0f6afef967049678cb4)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.everyculture.com/Sa-Th/Sudan.html)
<file_sep>/_data/blog/sao.md
---
template: BlogPost
path: /sao
date: 2020-11-23T07:08:53.137Z
title: Sao Tome and Principe
thumbnail: 'https://www.worldatlas.com/upload/ff/d8/17/shutterstock-669439993.jpg'
metaDescription: Country Page Post
---
# General Information about Sao Tome and Principe
- Name: <NAME> and Principe
- African Abbreviation: RS
- Capital City: Sao Tome
- Population: 219,159
- African Subregion: Middle Africa
### Country Flag

## Economic Information
- GDP (thousands): 429,016.61
- GDP Growth (annual %) in 2019: 2.4
- GDP Per Capita: 1994.9
## A Brief Overview of Sao Tome and Principe
The main cultural influences are Portuguese and West Africa. The country's music combines African and Portuguese styles. Portuguese ballroom dancing is popular.
#### References:
- [Capital Image](https://www.worldatlas.com/upload/ff/d8/17/shutterstock-669439993.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/sao-tome-and-principe/history-and-culture)
<file_sep>/_data/blog/sa.md
---
template: BlogPost
path: /sa
date: 2020-11-23T07:08:53.137Z
title: South Africa
thumbnail: 'https://dynamic-media-cdn.tripadvisor.com/media/photo-o/14/10/2e/1e/cape-town.jpg?w=1000&h=600&s=1'
metaDescription: Country Page Post
---
# General Information about South Africa
- Name: South Africa
- African Abbreviation: SA
- Capital City: Cape Town
- Population: 59,308,690
- African Subregion: Southern Africa
### Country Flag

## Economic Information
- GDP (thousands): 351,431,649.24
- GDP Growth (annual %) in 2019: .2
- GDP Per Capita: 6001.4
## A Brief Overview of South Africa
"A great gulf still exists between the white minority and the Black majority in matters of education and economic opportunity. Yet, South Africa is making steady progress in erasing some of these historic disparities and their consequences. Daily life is better for most of its people, and culture and the arts, which sometimes were forced into exile, are flourishing in the free climate of the postapartheid era."
#### References:
- [Capital Image](https://dynamic-media-cdn.tripadvisor.com/media/photo-o/14/10/2e/1e/cape-town.jpg?w=1000&h=600&s=1)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.britannica.com/place/South-Africa/Cultural-life)
<file_sep>/_data/blog/algeria.md
---
template: BlogPost
path: /algeria
date: 2020-11-23T07:08:53.137Z
title: Algeria
thumbnail: 'https://cdn.britannica.com/00/77300-050-F7C43044/View-city-Algiers-Algeria.jpg'
metaDescription: Algeria Page
---
# General Information about Algeria
- Name: Algeria
- African Abbreviation: AL
- Capital City: Algiers (Alger, El Djazâir, Al Jaza'ir)
- Population: 43,851,044
- African Subregion: Northern Africa
### Country Flag

## Economic Information
- GDP (thousands):
- GDP Growth (annual %) in 2019:
- GDP Per Capita:
#### References:
- [Capital Image](https://cdn.britannica.com/00/77300-050-F7C43044/View-city-Algiers-Algeria.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
<file_sep>/_data/blog/togo.md
---
template: BlogPost
path: /togo
date: 2020-11-23T07:08:53.137Z
title: Togo
thumbnail: 'https://www.tourist-destinations.com/wp-content/uploads/2013/01/04760f93_z.jpg'
metaDescription: Country Page Post
---
# General Information about Togo
- Name: Togo
- African Abbreviation: TO
- Capital City: Lome
- Population: 8,278,724
- African Subregion: Western Africa
### Country Flag

## Economic Information
- GDP (thousands): 5,459,979.42
- GDP Growth (annual %) in 2019: 5.3
- GDP Per Capita: 675.5
## A Brief Overview of Togo
The country declared independence in 1960 in what was a peaceful transition. Togo has a rich culture and over 30 tribal groups. French is the official language though others are spoken.
#### References:
- [Capital Image](https://www.tourist-destinations.com/wp-content/uploads/2013/01/04760f93_z.jpg)
- [Flags](https://github.com/hjnilsson/country-flags)
- [Abbreviation](https://planetarynames.wr.usgs.gov/Abbreviations)
- [Capital Cities](https://www.nationsonline.org/oneworld/capitals_africa.htm)
- [Population](https://www.worldometers.info/population/countries-in-africa-by-population/)
- [Economic Information](https://data.worldbank.org/)
- [Writeup Information](https://www.iexplore.com/articles/travel-guides/africa/togo/history-and-culture)
|
9fe76b05c7965024df35da73469ecdd37ac984a7
|
[
"Markdown",
"JavaScript"
] | 52 |
Markdown
|
lussierc/africancountryinfocenter.netlify.app
|
68cd5f7b172647ee311cdb165418bb199845dcb8
|
90f590f87d5dfc0003ea703fcb53033db1b581ba
|
refs/heads/main
|
<repo_name>hmzsumon/simple-react-assignment-7<file_sep>/README.md
## **React Sports**
_It's a players' website. Here is information about different players. Users will be able to select players from this website. It is made for learning purposes._
<br>
- #### _**Live Demo:**_ https://simple-raect-assignment-7.netlify.app/
- #### _**Source Code:**_ https://github.com/hmzsumon/simple-react-assignment-7
<br>
## Technologies










<!--  -->
<br>
<br>

<file_sep>/src/App.js
import './App.css';
import 'bootstrap/dist/css/bootstrap.min.css';
import Navbar from './components/Navbar';
import PlayersList from './components/PlayersList';
import { Route, Switch } from 'react-router-dom';
import PlayerDetails from './components/PlayerDetails';
import TeamsPlayers from './components/TeamsPlayers';
function App() {
return (
<div>
<Navbar />
<Switch>
<Route exact path='/' component={PlayersList} />
<Route path='/details' component={PlayerDetails} />
<Route path='/your_team' component={TeamsPlayers} />
</Switch>
</div>
);
}
export default App;
<file_sep>/src/components/Player.js
import {
Card,
CardActionArea,
CardContent,
CardMedia,
Grid,
Typography,
Button,
makeStyles,
ButtonGroup,
} from '@material-ui/core';
// import Cookies from 'js-cookie';
import React from 'react';
import { useEffect } from 'react';
import { useState } from 'react';
import { useContext } from 'react';
import { Link } from 'react-router-dom';
import { Store } from '../utils/Store';
const Player = ({ player }) => {
const { dispatch } = useContext(Store);
const [inListed, setInListed] = useState(false);
const classes = useStyles();
const { img, fullName, id, salary } = player;
const showDetailsHandler = () => {
dispatch({ type: 'SHOW_DETAILS', payload: id });
};
const addToListHandler = () => {
dispatch({ type: 'ADD_TO_TEAMS', payload: id });
setInListed(true);
};
useEffect(() => {}, []);
return (
<Grid item md={3} xs={12}>
<Card>
<CardActionArea>
<CardMedia className={classes.media}>
<Link to='/details'>
<img src={img} alt={fullName} onClick={showDetailsHandler} />
</Link>
</CardMedia>
<CardContent>
<Typography component='h2' variant='h6'>
<strong>Full Name:</strong>
</Typography>
<Typography>{fullName}</Typography>
<Typography>
<strong>Salary:</strong>
</Typography>
<Typography>BDT {salary} /-</Typography>
</CardContent>
</CardActionArea>
{inListed ? (
<ButtonGroup
variant='contained'
fullWidth
color='primary'
aria-label='contained primary button group'
>
<Button color='secondary' disabled>
listed
</Button>
<Button>
<Link className={classes.link} to='your_team'>
Your Team
</Link>
</Button>
</ButtonGroup>
) : (
<Button
type='button'
variant='contained'
fullWidth
size='small'
color='primary'
onClick={() => addToListHandler(id)}
>
Add To List
</Button>
)}
</Card>
</Grid>
);
};
export default Player;
// Shared Material-UI styles for the Player card.
const useStyles = makeStyles({
  // Centers the player image inside the card's media area.
  // NOTE(review): 'Grid' has unusual capitalization — CSS keyword values
  // are case-insensitive here, but lowercase 'grid' is conventional.
  media: {
    display: 'Grid',
    justifyContent: 'center',
  },
  // White link text that stays white and loses the underline on hover.
  link: {
    color: '#ffffff',
    '&:hover': {
      color: '#ffffff',
      textDecoration: 'none',
    },
  },
});
|
50662a52bdff5e76a02e98900b758771667db0e8
|
[
"Markdown",
"JavaScript"
] | 3 |
Markdown
|
hmzsumon/simple-react-assignment-7
|
6caba827c4b8bfc624a2542c5c4a69db9d767962
|
9e1609a5959ed052219f0c66838912f83c54fa36
|
refs/heads/master
|
<repo_name>Theng/react-native-stellar-sdk<file_sep>/src/eventsource.js
import EventSource from 'eventsource';

// Browser-environment shims for the React Native runtime: install a global
// EventSource constructor and a location object exposing a protocol —
// presumably required by the stellar SDK's streaming support (TODO confirm).
const locationShim = { protocol: 'https:' };
window.EventSource = EventSource;
global.EventSource = window.EventSource;
window.location = locationShim;
global.location = window.location;
|
1b34dc897667a92e150758394f3e5529c9e66776
|
[
"JavaScript"
] | 1 |
JavaScript
|
Theng/react-native-stellar-sdk
|
24b2a5deb503046df7ae6e5cad1a8594cd92096b
|
7165e9c0fa90483ec1aaba5374cdd0759619a2c5
|
refs/heads/master
|
<file_sep>using System;
/// <summary>
/// A typed weak-reference wrapper that manufactures delegates which call
/// into the weakly-held target without keeping it alive. Every returned
/// delegate re-reads <see cref="Target"/> at invocation time, so calls made
/// after the target has been garbage collected observe a null target
/// (the supplied prototypes are expected to tolerate null, e.g. via "?.",
/// as the tests in this repository do).
/// </summary>
public class DelegateTarget<T> : Reference<T>
    where T : class
{
    // Typed view over WeakReference.Target; null once the target has been
    // collected (subject to the trackResurrection flag).
    new public T Target => (T)base.Target;

    public DelegateTarget(T target, bool trackResurrection = false)
        : base(target, trackResurrection)
    { }

    /// <summary>
    /// Wraps <paramref name="proto"/> into a parameterless action invoked
    /// with the current (possibly null) weak target.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown when the prototype's bound instance is the weak target itself
    /// (reference comparison) — such a closure would strongly root the
    /// target and defeat the weak reference.
    /// </exception>
    public Action GetAction(Action<T> proto)
    {
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return () => proto(Target);
    }

    /// <summary>One-argument variant of <see cref="GetAction(Action{T})"/>.</summary>
    public Action<TArgs> GetAction<TArgs>(Action<T, TArgs> proto)
    {
        // Same guard: the prototype must not be bound to the target.
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return args => proto(Target, args);
    }

    /// <summary>
    /// Wraps <paramref name="proto"/> into a parameterless func invoked with
    /// the current (possibly null) weak target.
    /// </summary>
    public Func<TResult> GetFunc<TResult>(Func<T, TResult> proto)
    {
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return () => proto(Target);
    }

    /// <summary>One-argument variant of <see cref="GetFunc{TResult}"/>.</summary>
    public Func<TArgs, TResult> GetFunc<TArgs, TResult>(Func<T, TArgs, TResult> proto)
    {
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return args => proto(Target, args);
    }

    /// <summary>
    /// Builds a weak generic event handler: the returned handler forwards
    /// (target, sender, args) to the prototype, which must NOT be bound to
    /// the target (same rooting concern as <see cref="GetAction(Action{T})"/>).
    /// </summary>
    public EventHandler<TArgs> GetHandler<TArgs>(Action<T, object, TArgs> proto)
        where TArgs : EventArgs
    {
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return (sender, args) => proto(Target, sender, args);
    }

    /// <summary>
    /// Converts an event handler that IS bound to the weak target into a
    /// weak handler: only the handler's MethodInfo is captured, and the
    /// method is re-invoked via reflection against the current weak target.
    /// Invocations after the target has been collected are silently dropped.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown when <paramref name="proto"/> is not bound to the target —
    /// note the inverted (!=) guard: this overload specifically rewires
    /// target-bound handlers.
    /// </exception>
    public EventHandler<TArgs> GetHandler<TArgs>(EventHandler<TArgs> proto)
        where TArgs : EventArgs
    {
        if (proto.Target != base.Target)
            throw new ArgumentException();
        var mi = proto.Method;
        return GetHandler<TArgs>((target, sender, args) =>
        {
            // Target already collected: drop the call.
            if (target == null) return;
            mi.Invoke(target, new[] { sender, args });
        });
    }

    /// <summary>Non-generic analogue of the Action-based GetHandler above.</summary>
    public EventHandler GetHandler(Action<T, object, EventArgs> proto)
    {
        if (proto.Target == base.Target)
            throw new ArgumentException();
        return (sender, args) => proto(Target, sender, args);
    }

    /// <summary>
    /// Non-generic analogue of the reflection-based GetHandler above: the
    /// prototype must be bound to the weak target and is rewired so only its
    /// MethodInfo (not the instance) is captured.
    /// </summary>
    public EventHandler GetHandler(EventHandler proto)
    {
        if (proto.Target != base.Target)
            throw new ArgumentException();
        var mi = proto.Method;
        return GetHandler((target, sender, args) =>
        {
            if (target == null) return;
            mi.Invoke(target, new[] { sender, args });
        });
    }
}
<file_sep>using System;
/// <summary>
/// Strongly-typed base for weak references: derives from
/// <see cref="WeakReference"/> but constrains the stored target to
/// <typeparamref name="T"/>.
/// </summary>
public abstract class Reference<T> : WeakReference
    where T : class
{
    /// <summary>
    /// The weakly-held target (null once collected). The setter casts the
    /// incoming value to <typeparamref name="T"/>, so assigning an object
    /// of any other type throws an InvalidCastException.
    /// </summary>
    public override object Target
    {
        get { return base.Target; }
        set { base.Target = (T)value; }
    }

    // internal: only this assembly's wrappers (e.g. DelegateTarget<T>)
    // may construct concrete references.
    internal Reference(T target, bool trackResurrection)
        : base(target, trackResurrection)
    { }
}
<file_sep>/// <summary>
/// Convenience extensions for creating <see cref="DelegateTarget{T}"/> wrappers.
/// </summary>
public static class DelegateTargetEx
{
    /// <summary>
    /// Wraps <paramref name="target"/> in a weak <see cref="DelegateTarget{T}"/>.
    /// </summary>
    public static DelegateTarget<T> ToWeakDelegateTarget<T>(this T target, bool trackResurrection = false)
        where T : class
    {
        return new DelegateTarget<T>(target, trackResurrection);
    }
}
<file_sep>using System;
using System.Linq;
using Xunit;
/// <summary>
/// End-to-end tests for the weak delegate helpers: verifies that delegates
/// produced by <see cref="DelegateTarget{T}"/> stop reaching the target once
/// it has been garbage collected, that a retargeted reference works again,
/// and how a resurrection-tracking reference behaves across collections.
/// NOTE(review): these assertions depend on GC/finalizer timing and may be
/// fragile on runtimes with different collection behavior.
/// </summary>
public class WeakTests
{
    class Worker
    {
        // The finalizer stores the dying instance here, letting the test
        // "resurrect" it and reattach it to a weak reference.
        public static Worker Instance;
        // Shared counter mutated by the instance methods below.
        public static int NumVal;

        public void Inc()
            => NumVal++;
        public void Inc(int v)
            => NumVal += v;
        // Post-decrement: returns the value BEFORE decrementing.
        public int Dec()
            => NumVal--;
        // Compound assignment: returns the value AFTER subtracting.
        public int Dec(int v)
            => NumVal -= v;

        ~Worker()
        {
            Instance = this;
        }

        // Negates the counter; wired up through the weak handler factories.
        public void EventHandler(object sender, EventArgs e)
            => NumVal = -NumVal;
    }

    event EventHandler TestEvent;
    event EventHandler<EventArgs> TestEvent1;

    [Fact]
    public void ForEverything()
    {
        var worker = new Worker();
        var worker0 = new Worker();

        // Sanity-check Worker's direct (strongly referenced) behaviour.
        {
            Worker.NumVal = 233;
            Assert.Equal(233, Worker.NumVal);
            worker.Inc();
            Assert.Equal(234, Worker.NumVal);
            worker.Inc(20);
            Assert.Equal(254, Worker.NumVal);
            Assert.Equal(234, worker.Dec(20));
            Assert.Equal(234, worker.Dec());
        }

        // weak/weak1 share `worker` (weak1 tracks resurrection);
        // weak0 wraps the independent `worker0`.
        var weak = worker.ToWeakDelegateTarget();
        var weak0 = worker0.ToWeakDelegateTarget();
        var weak1 = worker.ToWeakDelegateTarget(true);
        TestEvent += weak.GetHandler(worker.EventHandler);
        TestEvent1 += weak1.GetHandler<EventArgs>(worker.EventHandler);

        // Case 1: plain weak target — delegates work while the target is
        // alive and go quiet (no-ops / nulls) after collection.
        {
            var inc0 = weak.GetAction(t => t?.Inc());
            var inc1 = weak.GetAction<int>((t, v) => t?.Inc(v));
            var dec0 = weak.GetFunc(t => t?.Dec());
            var dec1 = weak.GetFunc<int, int?>((t, v) => t?.Dec(v));
            Assert.Equal(233, Worker.NumVal);
            inc0();
            Assert.Equal(234, Worker.NumVal);
            inc1(20);
            Assert.Equal(254, Worker.NumVal);
            Assert.Equal(234, dec1(20));
            Assert.Equal(234, dec0());
            TestEvent1(null, null);
            Assert.Equal(-233, Worker.NumVal);
            TestEvent(null, null);
            Assert.Equal(233, Worker.NumVal);
            GC.KeepAlive(worker);
            worker = null;
            GC.Collect();
            //GC.Collect();
            // After collection: actions no-op, funcs return null.
            Assert.Equal(233, Worker.NumVal);
            inc0();
            Assert.Equal(233, Worker.NumVal);
            inc1(20);
            Assert.Equal(233, Worker.NumVal);
            Assert.Equal(null, dec0());
            Assert.Equal(null, dec1(20));
        }

        // Case 2: retargeting — weak0 is repointed at the resurrected
        // instance left by the finalizer, so its delegates keep working
        // even after worker0's original strong reference is dropped.
        {
            var inc0 = weak0.GetAction(t => t?.Inc());
            var inc1 = weak0.GetAction<int>((t, v) => t?.Inc(v));
            var dec0 = weak0.GetFunc(t => t?.Dec());
            var dec1 = weak0.GetFunc<int, int?>((t, v) => t?.Dec(v));
            Assert.Equal(233, Worker.NumVal);
            inc0();
            Assert.Equal(234, Worker.NumVal);
            inc1(20);
            Assert.Equal(254, Worker.NumVal);
            Assert.Equal(234, dec1(20));
            Assert.Equal(234, dec0());
            ((Reference<Worker>)weak0).Target = Worker.Instance;
            GC.KeepAlive(worker0);
            worker0 = null;
            GC.Collect();
            //GC.Collect();
            Assert.Equal(233, Worker.NumVal);
            inc0();
            Assert.Equal(234, Worker.NumVal);
            inc1(20);
            Assert.Equal(254, Worker.NumVal);
            Assert.Equal(234, dec1(20));
            Assert.Equal(234, dec0());
        }

        // Case 3: resurrection tracking — weak1 still reaches the
        // finalized-but-resurrected worker until the last strong root
        // (Worker.Instance) is cleared and another collection runs.
        {
            var inc0 = weak1.GetAction(t => t?.Inc());
            var inc1 = weak1.GetAction<int>((t, v) => t?.Inc(v));
            var dec0 = weak1.GetFunc(t => t?.Dec());
            var dec1 = weak1.GetFunc<int, int?>((t, v) => t?.Dec(v));
            Assert.Equal(233, Worker.NumVal);
            inc0();
            Assert.Equal(234, Worker.NumVal);
            inc1(20);
            Assert.Equal(254, Worker.NumVal);
            Assert.Equal(234, dec1(20));
            Assert.Equal(234, dec0());
            TestEvent(null, null);
            Assert.Equal(233, Worker.NumVal);
            TestEvent1(null, null);
            Assert.Equal(-233, Worker.NumVal);
            Worker.Instance = null;
            GC.Collect();
            //GC.Collect();
            TestEvent1(null, null);
            Assert.Equal(-233, Worker.NumVal);
            Assert.Equal(-233, Worker.NumVal);
            inc0();
            Assert.Equal(-233, Worker.NumVal);
            inc1(20);
            Assert.Equal(-233, Worker.NumVal);
            Assert.Equal(null, dec0());
            Assert.Equal(null, dec1(20));
        }
    }
}
|
cae54f1e602ccf7f7d4316b48e6035503a5fd234
|
[
"C#"
] | 4 |
C#
|
OmniKits/Weak
|
e8fcdb6950228a314cb0ddcd7f65c54e89b2e5d0
|
c74e5b031edb4be5743c73d8f29c735137b32443
|
refs/heads/master
|
<file_sep>#---
# Excerpted from "Seven Languages in Seven Weeks",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/btlang for more book information.
#---
# Doubt this is the correct way to solve it
# Wraps parsed CSV data and exposes each column as a pseudo-method: calling
# `row.one` returns the array of values in the column whose header is "one".
#
# Fix vs. the original: an unknown column name used to make
# `@headers.index(name.to_s)` return nil, so `row[nil]` raised a confusing
# TypeError. Unknown names now fall through to `super`, raising the
# conventional NoMethodError, and `respond_to_missing?` is implemented so
# `respond_to?(:one)` reports the dynamic column accessors.
class CsvRow
  attr_accessor :headers, :csv_contents

  def initialize(csv_contents, headers)
    @csv_contents = csv_contents
    @headers = headers
  end

  # Resolves a column by header name; returns the values of that column.
  def method_missing(name, *args)
    column = @headers.index(name.to_s)
    return super if column.nil?
    @csv_contents.map { |row| row[column] }
  end

  def respond_to_missing?(name, include_private = false)
    @headers.include?(name.to_s) || super
  end
end
# Mixin that gives a class simple CSV-reading behaviour. Including
# ActsAsCsv adds the +acts_as_csv+ class macro; invoking that macro mixes
# in InstanceMethods, whose constructor eagerly loads
# "<classname, downcased>.txt".
module ActsAsCsv
  # Inclusion hook: expose the acts_as_csv macro on the including class.
  def self.included(base)
    base.extend ClassMethods
  end

  module ClassMethods
    # Class-level macro that switches the CSV behaviour on.
    def acts_as_csv
      include InstanceMethods
    end
  end

  module InstanceMethods
    attr_accessor :headers, :csv_contents

    # Loading happens at construction time.
    def initialize
      read
    end

    # Parses the backing file: the first line is the comma-separated header
    # row, every remaining line is one data row.
    def read
      @csv_contents = []
      file = File.new(self.class.to_s.downcase + '.txt')
      @headers = file.gets.chomp.split(', ')
      file.each { |line| @csv_contents << line.chomp.split(', ') }
    end

    # Yields a CsvRow wrapper exposing each column as a pseudo-method.
    def each(&block)
      block.call CsvRow.new(@csv_contents, @headers)
    end
  end
end
# Demo class: gains CSV behaviour purely through the mixin macro.
class RubyCsv # no inheritance! You can mix it in
  include ActsAsCsv
  acts_as_csv
end

# Reads rubycsv.txt (headers on the first line) and prints every value in
# the "one" column via CsvRow#method_missing.
csv = RubyCsv.new
csv.each {|row| puts row.one}
<file_sep># Print the contents of an array of sixteen numbers
(1..16).each do |n|
  # Newline after every fourth number, otherwise a trailing space.
  print(n % 4 == 0 ? "#{n}\n" : "#{n} ")
end
# Now, do the same with each_slice in Enumerable
(1..16).each_slice(4) { |slice| p slice }
<file_sep># Print the string “Hello, world.”
puts "Hello, world"

# For the string "Hello, Ruby." find the index of the word "Ruby."
a = "Hello, Ruby."
/Ruby/ =~ a # => 7; =~ returns the index of the first match (nil if absent)

# Print your name ten times.
i = 0
while i < 10
  puts "Domantas"
  i += 1
end

# Print the string "This is sentence number 1," where the number 1
# changes from 1 to 10.
(1..10).each { |num| puts "This is sentence number #{num}" }

# Guess the random number
number = rand(10)
picked = 11 # sentinel outside rand(10)'s 0..9 range, so the loop always runs
puts "Try to guess the number!"
while picked != number
  # Bug fix: the original read only the first character of the input
  # ((gets)[0].to_i), so multi-digit guesses such as "10" were parsed as 1.
  picked = gets.to_i
  if picked < number
    puts "Too low!"
  elsif picked > number
    puts "Too high"
  end
end
puts "Correct!"
puts "Correct!"<file_sep># Write a simple grep that will print the lines of a file having any occurrences of a phrase anywhere in that line.
# You will need to do a simple regular expression match and read lines from a file.
# (This is surprisingly simple in Ruby.) If you want, include line numbers.
# Prints every line of +filename+ that matches the regular expression given
# in +regex+ (a pattern string), prefixed with its 1-based line number.
#
# Bug fixes vs. the original:
# * the pattern is now compiled with Regexp.new, so it is matched as a real
#   regular expression — String#[] with a String argument only performs a
#   literal substring lookup;
# * the match is tested against the line's own text — previously the line
#   number was prepended *before* matching, so a numeric pattern could
#   accidentally match the "N " prefix instead of the line itself.
def grep(regex, filename)
  pattern = Regexp.new(regex)
  File.readlines(filename).each_with_index do |line, index|
    text = line.chomp
    puts "#{index + 1} #{text}" if text =~ pattern
  end
end

if ARGV[0] && ARGV[1]
  grep(ARGV[0], ARGV[1])
else
  puts "grep [regex] [filename]"
end
Hi there! This is my repository consisting of solutions for the different tasks contained within the book [Seven Languages in Seven Weeks](https://www.amazon.com/Seven-Languages-Weeks-Programming-Programmers/dp/193435659X). I've also added the example files which are contained within the book. Feel free to have a look through the code! :smile:
<p align="center"><a href="https://www.amazon.com/Seven-Languages-Weeks-Programming-Programmers/dp/193435659X"><img width="300" src="https://learning.oreilly.com/library/cover/9781680500059/250w/"/></a></p>
## Covered Languages
- [Ruby](Ruby)
- [Io](Io)
- [Prolog (GNU Version)](Prolog)
- [Scala](Scala)
|
7c9b4ea875e9cc46c63b4034cf9339fe9e9dc7b3
|
[
"Markdown",
"Ruby"
] | 5 |
Ruby
|
lionas32/7-languages
|
eaa026d2193ea6e6fa5adc749c87456c0b531b8a
|
f06c09985b1fe132a10a9bc6cee8f6765d9f516b
|
refs/heads/master
|
<repo_name>ImmoweltGroup/eslint-config-immowelt-es6<file_sep>/src/index.spec.js
// Jest spec for the shareable ESLint config exported by index.js.
const { linter } = require('eslint');
const config = require('./index.js');

// Lints a code snippet against the exported config and returns the array of
// ESLint messages (empty when the snippet passes cleanly).
const runEslint = str => linter.verify(str, config, { filename: 'foo.js' });

describe('eslint-config-immowelt-es6', () => {
  it('should export a valid eslint config object structure.', () => {
    expect(typeof config).toBe('object');
  });

  it('should not throw errors when checking a valid code fixture.', () => {
    // NOTE: the fixture's whitespace is part of the template literal and
    // therefore part of what gets linted — keep it exactly as-is.
    const errors = runEslint(`var foo = true;
if (foo) {
console.warn('warning');
}
`);

    expect(errors).toEqual([]);
  });
});
|
dafd7af271f69aff93b17abbf35c95e9c5867dfb
|
[
"JavaScript"
] | 1 |
JavaScript
|
ImmoweltGroup/eslint-config-immowelt-es6
|
8a0fe83bd8957c65af49f0d1d8d5f8ff3749789f
|
7d9f582e8a6965b88f7a7d87a61c0e0e80354580
|
refs/heads/master
|
<repo_name>Hlbecerra/Cajero<file_sep>/Cajero/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Cajero
{
class Program
{
static void Main(string[] args)
{
string[] nombre;
int[] numeroCuenta;
double[] saldoApertura;
int aux;
double saldo;
nombre = new string[3];
numeroCuenta = new int[3];
saldoApertura = new double[3];
char op;
int s1;
do
{
Console.Clear();
Console.WriteLine("\n---------ELIGA UNA OPCION--------");
Console.WriteLine("1. REGISTRAR USUARIO");
Console.WriteLine("2. CONSIGNAR A CUENTA BANCARIA");
Console.WriteLine("3. RETIRAR DE CUENTA BANCARIA");
Console.WriteLine("4. CONSULTAR CUENTA BANCARIA");
Console.WriteLine("5. SALIR");
Console.WriteLine("--------------MENU---------------");
s1 = int.Parse(Console.ReadLine());
switch (s1)
{
case 1:
Console.Clear();
op = 'N';
do
{
int f = 0;
Console.WriteLine("BIENVENIDO");
Console.WriteLine("DIGITE EL NOMBRE DEL USUARIO:");
nombre[f] = Console.ReadLine();
Console.WriteLine("\nDIGITE EL NUMERO DE CUENTA:");
string linea;
linea = Console.ReadLine();
numeroCuenta[f] = int.Parse(linea);
Console.WriteLine("\nDIGITE EL SALDO DE APERTURA:");
string linea1;
linea1 = Console.ReadLine();
saldoApertura[f] = double.Parse(linea1);
f = f + 1;
Console.WriteLine("\nDESEA REGISTRAR OTRO USUARIO?");
op = char.Parse(Console.ReadLine());
} while (op == 'S');
break;
case 2:
Console.Clear();
Console.WriteLine("DIGITE EL NUMERO DE CUENTA:");
aux = int.Parse(Console.ReadLine());
for (int f = 0; f < numeroCuenta.Length; f++)
{
if (aux == numeroCuenta[f])
{
Console.WriteLine("CUENTA ENCONTRADA...");
Console.WriteLine("\nNumero cuenta: " + numeroCuenta[f]);
Console.Write("\nNombre: " + nombre[f]);
Console.Write("\nSaldo: " + saldoApertura[f]);
Console.WriteLine("\nDIGITE LA CANTIDAD DE DINERO QUE DESEA DEPOSITAR");
saldo = double.Parse(Console.ReadLine());
saldoApertura[f] = saldoApertura[f] + saldo;
Console.WriteLine("\nEL NUEVO SALDO ES DE: ");
Console.Write(saldoApertura[f]);
op = 'S';
}
else if (f == 19)
{
Console.WriteLine("No se encontre la cuenta digitada");
}
}
Console.ReadKey();
break;
case 3:
Console.Clear();
Console.WriteLine("DIGITE EL NUMERO DE CUENTA:");
aux = int.Parse(Console.ReadLine());
for (int f = 0; f < numeroCuenta.Length; f++)
{
if (aux == numeroCuenta[f])
{
Console.WriteLine("CUENTA ENCONTRADA...");
Console.WriteLine("\nNumero cuenta: " + numeroCuenta[f]);
Console.Write("\nNombre: " + nombre[f]);
Console.Write("\nSaldo: " + saldoApertura[f]);
Console.WriteLine("\nDIGITE LA CANTIDAD DE DINERO QUE DESEA RETIRAR");
saldo = double.Parse(Console.ReadLine());
saldoApertura[f] = saldoApertura[f] - saldo;
Console.WriteLine("\nEL NUEVO SALDO ES DE: ");
Console.Write(saldoApertura[f]);
op = 'S';
}
else if (f == 19)
{
Console.WriteLine("No se encontre la cuenta digitada");
}
}
Console.ReadKey();
break;
case 4:
Console.Clear();
Console.WriteLine("DIGITE EL NUMERO DE CUENTA:");
aux = int.Parse(Console.ReadLine());
for (int f = 0; f < numeroCuenta.Length; f++)
{
if (aux == numeroCuenta[f])
{
Console.WriteLine("CUENTA ENCONTRADA...");
Console.WriteLine("\nNumero cuenta: " + numeroCuenta[f]);
Console.Write("\nNombre: " + nombre[f]);
Console.Write("\nSaldo: " + saldoApertura[f]);
op = 'S';
}
else if (f == 19)
{
Console.WriteLine("No se encontre la cuenta digitada");
}
}
Console.ReadKey();
break;
case 5:
break;
default:
Console.WriteLine("OPCION INVALIDA");
break;
}
} while (s1 != 5);
}
}
}
|
8f6e5c2184c00dca625c6d16c7f524f84cdbc52a
|
[
"C#"
] | 1 |
C#
|
Hlbecerra/Cajero
|
573e895baa1d1b78068f5455edf96cdfd8deac7c
|
df8e910df0376723167abac5a5dde7cfd9dbf86c
|
refs/heads/master
|
<file_sep>0- install node
1- docker build -t federicobottoni/node-web-app
2- docker-compose build
3- docker-compose up
https://nodejs.org/en/docs/guides/nodejs-docker-webapp/
https://docs.docker.com/docker-for-windows/kubernetes/
https://node-postgres.com/features/connecting
https://prometheus.io/docs/visualization/grafana/#grafana-support-for-prometheus
<NAME> 806944, <NAME> 808292
L'applicativo consiste in una webapp in Node.js con database PostgreSQL i cui ambienti sono containerizzati con Docker, orchestrati con Kubernetes e monitorati da Prometheus supportato da Grafana. Il sistema soddisfa i requisiti di Containerization, Provisioning e Monitoring.
<file_sep>CREATE TABLE IF NOT EXISTS articles (
id int,
title text NOT NULL,
author text NULL,
description text NULL,
PRIMARY KEY(id)
);
INSERT INTO articles VALUES
(1, 'Example', '<NAME>', 'Example of a scientific article.'),
(2, 'Testing ', '<NAME>', 'Another example of a scientific article.'),
(3, 'Example42', '<NAME>', 'I want to buy a onesie… but know it won’t suit me.
I would rather be a bird than a fish.
She did her best to help him.
If I don’t like something, I’ll stay away from it.
Let me help you with your baggage.
She folded her handkerchief neatly.
If you like tuna and tomato sauce- try combining the two. It’s really not as bad as it sounds.
Two seats were vacant.
He didn’t want to go to the dentist, yet he went anyway.
We have a lot of rain in June.
My Mum tries to be cool by saying that she likes all the same things that I do.
We have never been to Asia, nor have we visited Africa.
Tom got a small piece of pie.
Wow, does that work?
The lake is a long way from here.
She did not cheat on the test, for it was not the right thing to do.
I really want to go to work, but I am too sick to drive.
Rock music approaches at high velocity.
He said he was not there yesterday; however, many people saw him there.
The book is in front of the table.
If the Easter Bunny and the Tooth Fairy had babies would they take your teeth and leave chocolate for you?
It was getting dark, and we weren’t there yet.
Last Friday in three week’s time I saw a spotted striped blue worm shake hands with a legless lizard.
He ran out of money, so he had to stop playing poker.
Writing a list of random sentences is harder than I initially thought it would be.
They got there early, and they got really good seats.
She borrowed the book from him many years ago and has not yet returned it.
I currently have 4 windows open up… and I don’t know why.
He turned in the research paper on Friday; otherwise, he would have not passed the class.
Italy is my favorite country; in fact, I plan to spend two weeks there next year.'),
(4, 'Example82', '<NAME>', 'A purple pig and a green donkey flew a kite in the middle of the night and ended up sunburnt.
He ran out of money, so he had to stop playing poker.
Last Friday in three week’s time I saw a spotted striped blue worm shake hands with a legless lizard.
Italy is my favorite country; in fact, I plan to spend two weeks there next year.
She borrowed the book from him many years ago and has not yet returned it.
Where do random thoughts come from?
I was very proud of my nickname throughout high school but today- I couldn’t be any different to what my nickname was.
I will never be this young again. Ever. Oh damn… I just got older.
It was getting dark, and we weren’t there yet.
When I was little I had a car door slammed shut on my hand. I still remember it quite vividly.
The old apple revels in its authority.
Should we start class now, or should we wait for everyone to get here?
He turned in the research paper on Friday; otherwise, he would have not passed the class.
Writing a list of random sentences is harder than I initially thought it would be.
The shooter says goodbye to his love.
What was the person thinking when they discovered cow’s milk was fine for human consumption… and why did they do it in the first place!?
She did her best to help him.
I currently have 4 windows open up… and I don’t know why.
Sometimes, all you need to do is completely make an ass of yourself and laugh it off to realise that life isn’t so bad after all.
I checked to make sure that he was still alive.
The stranger officiates the meal.
I hear that Nancy is very pretty.
I am never at home on Sundays.
The sky is clear; the stars are twinkling.
Wednesday is hump day, but has anyone asked the camel if he’s happy about it?
If the Easter Bunny and the Tooth Fairy had babies would they take your teeth and leave chocolate for you?
Everyone was busy, so I went to the movie alone.
Check back tomorrow; I will see if the book has arrived.
The quick brown fox jumps over the lazy dog.
A glittering gem is not enough.'),
(5, 'Test another article', 'Foo bar author', 'We need to rent a room for our party.
She borrowed the book from him many years ago and has not yet returned it.
She did not cheat on the test, for it was not the right thing to do.
Joe made the sugar cookies; Susan decorated them.
The quick brown fox jumps over the lazy dog.
There were white out conditions in the town; subsequently, the roads were impassable.
Someone I know recently combined Maple Syrup & buttered Popcorn thinking it would taste like caramel popcorn. It didn’t and they don’t recommend anyone else do it either.
She advised him to come back at once.
She wrote him a long letter, but he did not read it.
The clock within this blog and the clock on my laptop are 1 hour different from each other.
We have never been to Asia, nor have we visited Africa.
Is it free?
I want more detailed information.
Wow, does that work?
Let me help you with your baggage.
Christmas is coming.
I am never at home on Sundays.
Two seats were vacant.
Yeah, I think it is a good environment for learning English.
I think I will buy the red car, or I will lease the blue one.
Do not step on the broken glass.
Rock music approaches at high velocity.
I love eating toasted cheese and tuna sandwiches.
She did her best to help him.
The river stole the gods.
Should we start class now, or should we wait for everyone to get here?
Sometimes, all you need to do is completely make an ass of yourself and laugh it off to realise that life isn’t so bad after all.
The book is in front of the table.
He told us a very exciting adventure story.
Last Friday in three week’s time I saw a spotted striped blue worm shake hands with a legless lizard.');<file_sep>version: "3"
services:
app:
container_name: app
build: "./app"
image: app:tag
restart: always
volumes:
- app_data:/app
depends_on:
- db
links:
- db
ports:
- 1010:1010
environment:
- PGHOST=db
- PGPORT=5432
- PGUSER=dbuser
- PGPASSWORD=<PASSWORD>
- PGDATABASE=articlesdb
- NODE_ENV=development
db:
container_name: db
image: postgres
build: "./db"
restart: always
volumes:
- db_data:/var/lib/postgresql/data
environment:
- POSTGRES_DB=articlesdb
- POSTGRES_USER=dbuser
- POSTGRES_PASSWORD=<PASSWORD>
- PGDATA=/tmp
ports:
- 5432:5432
prometheus:
container_name: prometheus
image: prom/prometheus:latest
ports:
- 9090:9090
volumes:
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus_data:/etc/prometheus/
- prometheus_data:/prometheus
command:
- "--config.file=/etc/prometheus/prometheus.yml"
- "--storage.tsdb.path=/prometheus"
- "--web.console.libraries=/etc/prometheus/console_libraries"
- "--web.console.templates=/etc/prometheus/consoles"
- "--storage.tsdb.retention=200h"
- "--web.enable-lifecycle"
depends_on:
- cadvisor
cadvisor:
image: google/cadvisor:latest
container_name: cadvisor
ports:
- 7070:8080
volumes:
- /:/rootfs:ro
- /var/run:/var/run:rw
- /sys:/sys:ro
- /var/lib/docker/:/var/lib/docker:ro
depends_on:
- db
- app
grafana:
container_name: grafana
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_USER=${ADMIN_USER:-admin}
- GF_SECURITY_ADMIN_PASSWORD=${ADMIN_PASSWORD:-<PASSWORD>}
- GF_USERS_ALLOW_SIGN_UP=false
restart: always
expose:
- 3000
ports:
- 3000:3000
volumes:
grafana_data: {}
prometheus_data: {}
db_data: {}
app_data: {}
<file_sep>FROM postgres:latest
ADD ./script/init.sql /docker-entrypoint-initdb.d<file_sep>FROM prom/prometheus
#ADD prometheus.yml /etc/prometheus/prometheus.yml
EXPOSE 9090
<file_sep>"use strict";
const express = require("express");
const { Pool } = require("pg");
const NODE_PORT = 1010;
const HOST = "0.0.0.0";
const pool = new Pool();
pool.connect();
const app = express();
console.log(`Running on http://${HOST}:${NODE_PORT}`);
// API
app.get("/api/article/:id", (req, res) => {
const { id } = req.params;
if (!isNaN(parseInt(id))) {
pool
.query("SELECT * FROM articles WHERE id = $1", [id])
.then(data => {
res.send(data.rows.length > 0 && data.rows[0]);
})
.catch(err => {
console.log(err);
res.sendStatus(500);
});
} else {
res.sendStatus(500);
}
});
app.get("/api/articlesNames", (req, res) => {
pool
.query("SELECT id, title FROM articles")
.then(data => {
res.send(data.rows);
})
.catch(err => {
console.log(err);
res.sendStatus(500);
});
});
// ROUTING
app.get("/", (req, res) => {
res.sendFile(__dirname + "/src/index.html");
});
app.get("/assets/style.css", (req, res) => {
res.sendFile(__dirname + "/src/assets/style.css");
});
app.get("/assets/logo.png", (req, res) => {
res.sendFile(__dirname + "/src/assets/logo.png");
});
app.get("/assets/autocomplete.js", (req, res) => {
res.sendFile(__dirname + "/src/assets/autocomplete.js");
});
module.exports = app.listen(NODE_PORT, () => {
console.log("Listening on port", NODE_PORT);
});
<file_sep>## [Processo e Sviluppo del Software] - Assignment 1
# Super Extreme Cool Search for Scientific Articles

Super Extreme Cool Search for Scientific Articles is a simple distributed application allowing for the search of articles against a database.
The core of the application is built on `node.js`, interfacing a `postgresql` database for data management.
## Authors
- **<NAME>** - _806944_
- **<NAME>** - _808292_
## Getting Started
```
$ git clone https://gitlab.com/FedericoBottoni/pss-assignment1
$ cd pss-assignment1
```
## Architecture
The application consists of three main components:
- **front-end**: through which a user is able to query the database through API calls
- **back-end**: which manages servicing the API and querying the DB
- **database**: storing the articles data
## DevOps
We tried to address 4 aspects of the DevOps toolchain for the given assignment. These are:
- Containerization
- Provisioning
- Continuous Integration
- Monitoring
## Containerization
We used Docker and Docker Compose to implement our application as a multi-containerized one. The process generates 5 containers: `app` and `db` are the main containers which constitute the application, while `prometheus`, `cadvisor` and `grafana` are used for monitoring.
To get the application started with `docker-compose`:
```
$ docker-compose up --build
$ docker-compose ps
Name Command State Ports
----------------------------------------------------------------------------
app npm start Up 0.0.0.0:1010->1010/tcp
cadvisor /usr/bin/cadvisor -logtostderr Up 0.0.0.0:7070->8080/tcp
db docker-entrypoint.sh postgres Up 0.0.0.0:5432->5432/tcp
grafana /run.sh Up 0.0.0.0:3000->3000/tcp
prometheus /bin/prometheus --config.f ... Up 0.0.0.0:9090->9090/tcp
```
## Monitoring
For monitoring we used Prometheus, Grafana and cAdvisor.
cAdvisor analyzes and exposes usage and performance data from running container, exposing to Prometheus these metrics - it is to all effects an exporter from third-party system metric to Prometheus metrics.
Prometheus is then able to scrape them, and service them to Grafana, which queries the Prometheus container and is able to give to the user a customizable dashboard to show graphs and the necessary info an admin would need.
The configuration for these monitoring tools is given in the `docker-compose.yml` and the `prometheus/prometheus.yml` files.
This is what the final Grafana dashboard looks like, when the system is correctly running.

## Provisioning
Our tool of choice is Kubernetes.
To address provisioning we have tried different alternatives.
We tried to deploy our application to the Kubernetes cluster hosted by GARR Cloud, to no avail. Unfortunately the short term of the assignment and the concomitance of the All Saints' Day holidays slowed significantly our communication with the GARR assistance to correctly register and authorize us to access the shared cluster.
We have thus decided to deploy our application using Minikube. Minikube is a tool that allows to run a single-node Kubernetes cluster inside a VM on a local machine.
Inside the `k8s\` folder we have provided the configuration for every container, in the form of `service+deployment+volume`.
```
$ minikube start //may take a while...
$ eval $(minikube docker-env) //this is to use the local Docker daemon and use local images
$ kubectl create -f k8s/
$ kubectl get pods
NAME READY STATUS RESTARTS AGE
app-7f54fd667b-bf6mr 1/1 Running 0 1m
cadvisor-847c8b697f-dk46j 0/1 Error 2 1m
db-8b97b649d-ppz9n 1/1 Running 0 1m
grafana-5ccb74444f-9r8vd 1/1 Running 0 1m
prometheus-78d56c69bd-4qnv7 0/1 Error 2 1m
```
Unfortunately, we haven't been able to launch our Prometheus and cAdvisor monitoring pods into the Kubernetes cluster. Issues with the persistency of the volumes caused the pods to cease running. More in-depth study of the technology and troubleshooting were needed to solve the presented issues, but we didn't have the time to tackle them.
Executing `minikube dashboard` from the terminal lets us access the dashboard from the browser, to check eventual details on the status of the cluster.

## Continuous Integration
We have developed continuous integration pipeline implementing a little suite of tests for APIs runnable by Jest package and supertest for node.js in order to check the data structure sent by the database. When a developer commit some changes, the file `.gitlab-ci.yml` specifies which jobs have to be executed in gitlab servers. Those jobs try to build the entire application and, if it succeded, run the API tests. If everything works and there are no errors, the commit is valid.
## Conclusions
The project was deeply interesting, and provided us with the opportunity to learn and study about the technology representing the state of the art of the DevOps ecosystem.
Due to work and Bachelor of CS final test we didn't have enough resources to commit to the project, but we expect that with enough time we could've addressed some issues better.
Future developments may include:
- Deploying the application to a Kubernetes cluster on a Cloud (GARR, Google Cloud Platform, AWS)
- Integrating different Exporters to Prometheus to add different kind of metrics to the system (PostgreSQL metrics, etc)
- Adding to our DevOps chain a Configuration management tool
<file_sep>const request = require("supertest");
const app = require("../server");
describe("Testing apis", () => {
test("articlesNames call: response is defined, is non-empty and records are more then 0", done => {
return request(app)
.get("/api/articlesNames")
.set("Accept", "application/json")
.expect(200)
.then(res => {
expect(res.body).toBeDefined();
expect(res.body.length).toBeGreaterThan(0);
expect(res.body.filter(x => !x.id || !x.title).length).toBe(0);
done();
});
});
test("article call: response is not null and has article attributes", done => {
return request(app)
.get("/api/article/1")
.set("Accept", "application/json")
.expect(200)
.then(res => {
expect(res.body).toBeDefined();
done();
});
});
test("article call: article attributes", done => {
return request(app)
.get("/api/article/test")
.set("Accept", "application/json")
.expect(500)
.end((err, res) => {
if (err) {
return done(err);
}
done();
});
});
});
|
dcd89e98ddbd5f6d5f95e4bc66408db3a7c68270
|
[
"SQL",
"YAML",
"Markdown",
"JavaScript",
"Dockerfile"
] | 8 |
Markdown
|
nhabbash/search-form-devops
|
4ff92fa8196735bc101e2d175609137d31938d21
|
95728149b0e69dd005fa080074e39dafcb5cdb8a
|
refs/heads/master
|
<file_sep>import PersonDetails from "./item-details";
export default PersonDetails;
|
67801cf8fa11f10dd89fc9060446e91af5f19608
|
[
"JavaScript"
] | 1 |
JavaScript
|
Alexdft/ReactStarWars
|
7d550f8b7f55e9f659dddde60d4d416d4d59cf69
|
f2b833595e34280404291a3775c6e6ca2db5a495
|
refs/heads/master
|
<repo_name>GrinGraz/corredora_sql<file_sep>/Stored Procedures/sp_infoPropiedad.sql
CREATE PROCEDURE sp_InfoPropiedad
@idpropiedad AS INT
AS
BEGIN
SELECT p.DIRECCION, p.COMUNA,p.CUIDAD,p.VALORARRIENDO,d.NOMBRE AS DUEÑO,d.APATERNO AS APELLIDO, p.PIEZAS,p.BANOS,p.ARRENDADO,
a.NOMBRE AS ARRENDATARIO,a.APATERNO AS APELLIDO ,c.FECHATERMINO AS FECHATERMINO_CONTRATO
FROM PROPIEDAD AS p join CONTRATO AS c on p.IDPROPIEDAD=c.IDPROPIEDAD join
ARRENDATARIO AS a ON a.RUTARRENDATARIO = c.RUTARRENDATARIO join
TIPOPROPIEDAD AS t ON t.IDTIPO=p.IDTIPO join
DUENO AS d ON d.RUTDUENO = p.RUTDUENO
WHERE p.IDPROPIEDAD=1
END<file_sep>/README.md
# Corredora SQL
Modulo base datos para sistema corredora de propiedades
<file_sep>/Views/vw_masDeDosPropiedades.sql
-- =============================================
-- Author: <NAME>
-- Create date: 23-04-2015
-- Description: Vista encargada de obtener a los
-- dueños con mas de 2 propiedades
-- =============================================
--Crear vista
CREATE VIEW vw_masDeDosPropiedades AS
--Definir select a los campos necesarios
SELECT D.RUTDUENO, D.NOMBRE, D.APATERNO
--Definir from a las tablas necesarias
FROM DUENO D, PROPIEDAD C
--Definir condicion de consulta
WHERE D.RUTDUENO = C.RUTDUENO
--Definir agrupacion de datos
GROUP BY D.RUTDUENO, D.NOMBRE, D.APATERNO, D.AMATERNO
--Definir condicion para la agrupacion
HAVING COUNT(DISTINCT C.IDPROPIEDAD) >=2;
|
7c0dcbe8b73804b3cd55a4dddd84494e0a0e3807
|
[
"Markdown",
"SQL"
] | 3 |
SQL
|
GrinGraz/corredora_sql
|
8b13d24dad9c20653ea2de3c83adf0a5df5ee1a4
|
4f7d06c998789e0073eba762a4df89bb02725bde
|
refs/heads/master
|
<repo_name>przemwo/react-graphql-nodejs-starter<file_sep>/README.md
# react-graphql-nodejs-starter
Starting point for creating React - GraphQL - NodeJS apps.
Based on [graphql-playlist](https://github.com/iamshaunjp/graphql-playlist "graphql-playlist") great tutorial.<file_sep>/client/src/queries/index.js
import {
getPostsQuery,
getUsersQuery,
} from './queries';
export {
getPostsQuery,
getUsersQuery,
};
<file_sep>/client/src/queries/queries.js
import { gql } from 'apollo-boost';
export const getPostsQuery = gql`
{
posts {
id
title
body
}
}
`;
export const getUsersQuery = gql`
{
users {
id
name
}
}
`;
<file_sep>/client/src/components/PostList/PostList.js
import React from 'react';
import { graphql } from 'react-apollo';
import { getPostsQuery } from '../../queries';
class PostList extends React.Component {
renderPosts = () => {
const { loading, posts = [] } = this.props.data;
if(loading) {
return <h2>Loading posts...</h2>
} else {
return(
<ul>
{ posts.map(({ id, title, body }) => (
<li key={ id }>
<h3>{ title }</h3>
<p>{ body }</p>
</li>
)) }
</ul>
);
}
}
render() {
return(
<div>
{ this.renderPosts() }
</div>
);
}
}
export default graphql(getPostsQuery)(PostList);<file_sep>/client/src/components/AddPost/AddPost.js
import React from 'react';
import { graphql } from 'react-apollo';
import { getUsersQuery } from '../../queries';
class AddPost extends React.Component {
renderUsers = () => {
const { loading, users = [] } = this.props.data;
if(loading) {
return(
<option disabled>Loading users...</option>
);
} else {
return(
users.map(({ id, name }) => (
<option key={id} value={id}>{name}</option>
))
);
}
}
render() {
return (
<form>
<div>
<label htmlFor="">Title:</label>
<input type="text"/>
</div>
<div>
<label htmlFor="">Body:</label>
<input type="text"/>
</div>
<div>
<label htmlFor="">User:</label>
<select name="" id="">
<option disabled selected>Select user</option>
{this.renderUsers()}
</select>
</div>
<button>Add post</button>
</form>
);
}
}
export default graphql(getUsersQuery)(AddPost);<file_sep>/client/src/components/AddPost/index.js
import AddPost from './AddPost';
export default AddPost;
|
08a8388833452384509476f38b8723b5e3c4399b
|
[
"Markdown",
"JavaScript"
] | 6 |
Markdown
|
przemwo/react-graphql-nodejs-starter
|
31d6ea5b17807795ee16ab3ccd0747db8b81cafc
|
11c96a804590f6176ace19510a52f6f0d292a4e7
|
refs/heads/master
|
<repo_name>zhoneyboo/project_management<file_sep>/app/EnrollmentInGovernmentAndPrivateSchool.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class EnrollmentInGovernmentAndPrivateSchool extends Model
{
protected $fillable = [
'educational_level',
'province_public',
'province_private',
'calapan_public',
'calapan_private',
'calapan_luc_suc',
'year',
];
}
<file_sep>/app/ForestCover.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class ForestCover extends Model
{
protected $fillable = [
'province',
'closed_forest',
'open_forest',
'mangrove',
'year',
];
}
<file_sep>/app/User.php
<?php
namespace App;
use Illuminate\Notifications\Notifiable;
use Illuminate\Foundation\Auth\User as Authenticatable;
use Illuminate\Database\Eloquent\SoftDeletes;
class User extends Authenticatable
{
use SoftDeletes;
protected $dates = ['deleted_at'];
use Notifiable;
/**
* The attributes that are mass assignable.
*
* @var array
*/
protected $fillable = [
'name', 'email', 'password', '<PASSWORD>', 'role'
];
/**
* The attributes that should be hidden for arrays.
*
* @var array
*/
protected $hidden = [
'password', 'remember_token',
];
public function access()
{
return $this->hasMany('App\UserAccess', 'user_id','id');
}
public function getCreatedAtAttribute($value){
return date('F j, Y h:i:s a', strtotime($value));
}
public function getUpdatedAtAttribute($value){
return date('F j, Y h:i:s a', strtotime($value));
}
}
<file_sep>/app/LicensePermitIssued.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class LicensePermitIssued extends Model
{
protected $fillable = [
'pro_new_first_sem',
'pro_new_second_sem',
'pro_renew_first_sem',
'pro_renew_second_sem',
'non_pro_new_first_sem',
'non_pro_new_second_sem',
'non_pro_renew_first_sem',
'non_pro_renew_second_sem',
'student_first_sem',
'student_second_sem',
'year'
];
}
<file_sep>/database/migrations/2018_11_12_052918_create_nutritional_statuses_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateNutritionalStatusesTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('nutritional_statuses', function (Blueprint $table) {
$table->increments('id');
$table->string('age_range');
$table->integer('normal_boys');
$table->integer('normal_girls');
$table->integer('underweight_boys');
$table->integer('underweight_girls');
$table->integer('severely_boys');
$table->integer('severely_girls');
$table->integer('overweight_boys');
$table->integer('overweight_girls');
$table->year('year');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('nutritional_statuses');
}
}
<file_sep>/app/PerformanceIndicatorInPublicSchool.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class PerformanceIndicatorInPublicSchool extends Model
{
protected $fillable = [
'indicator',
'province_elementary',
'province_secondary',
'calapan_elementary',
'calapan_secondary',
'year',
];
}
<file_sep>/app/Http/Controllers/HomeController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Barangay;
use DB;
class HomeController extends Controller
{
/**
* Create a new controller instance.
*
* @return void
*/
public function __construct()
{
$this->middleware('auth');
}
/**
* Show the application dashboard.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
return view('home');
}
public function generate_word(Request $request)
{
header("Content-type: application/vnd.ms-word");
header("Content-Disposition: attachment;Filename=document_name.doc");
echo "<html>";
echo "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=Windows-1252\">";
echo "<body>";
echo $request->input('codes');
echo "</body>";
echo "</html>";
return 'success';
}
public function get_addresses()
{
// $addresses = Barangay::where('ref_brgy.prov_id', 24)
// ->join('ref_region', 'ref_region.id','ref_brgy.reg_id')
// ->join('ref_citymun', 'ref_citymun.id','ref_brgy.citymun_id')
// ->join('ref_province', 'ref_province.id','ref_brgy.prov_id')
// ->select(DB::raw('CONCAT(UCASE(brgyDesc), " ", UCASE(citymunDesc), ", ", UCASE(provDesc)) as address'))
// ->get();
// return $addresses;
}
}
<file_sep>/app/Http/Controllers/InfrastructureUtilitiesController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\RegisteredVehicle;
use App\WaterSystem;
use App\Communication;
use App\Municipality;
use App\StatusOfPower;
use App\EnergizedUnergizedBarangay;
use App\HouseholdPopulationBySourceOfDrinkingWater;
use App\PresentSourceOfPower;
use App\PowerSubstation;
use App\CableTelevisionNetwork;
use App\RadioStation;
use App\SatelliteTelevisionNetwork;
use App\LicensePermitIssued;
class InfrastructureUtilitiesController extends Controller
{
public function getRegisteredVehicle(Request $request){
if($request->has('year'))
{
return $this->defaultRegisteredVehicle($request->input('year'));
}else{
return $this->defaultRegisteredVehicle(date('o'));
}
}
public function defaultRegisteredVehicle($year)
{
$check = RegisteredVehicle::where('year', $year)->get();
if(count($check) <= 0)
{
for ($i=1; $i < 13; $i++) {
$create = RegisteredVehicle::create([
'month' => $i,
'year' => $year
]);
}
}
return RegisteredVehicle::where('year', $year)->get();
}
public function updateRegisteredVehicle(Request $request)
{
$data = $request->input('data');
foreach ($data as $value)
{
$update = RegisteredVehicle::where('id', $value["id"])
->update([
'cars' => $value['cars'] ?? 0,
'suv_uv' => $value['suv_uv'] ?? 0,
'tricycle' => $value['tricycle'] ?? 0,
'truck' => $value['truck'] ?? 0,
'truck_bus' => $value['truck_bus'] ?? 0,
'motorcycle' => $value['motorcycle'] ?? 0,
'uv_jeep' => $value['uv_jeep'] ?? 0,
]);
}
return 'success';
}
public function getWaterSystem(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
$water = WaterSystem::where('year', $year)
->join('municipalities', 'municipalities.id','=','water_systems.municipality')
->select('water_systems.*','municipalities.id as municipality_id','municipalities.municipality as municipality')
->get();
$arr = [];
foreach ($water as $value) {
$arr[$value->municipality][] = [
'id' => $value->id,
'municipality_name' => $value->municipality,
'municipality' => $value->municipality_id,
'water_service_provider' => $value->water_service_provider,
'type_of_water_facility' => $value->type_of_water_facility,
'year' => $value->year,
];
}
return $arr;
}
public function createWaterSystem(Request $request)
{
$create = WaterSystem::create([
'municipality' => $request->input('municipality'),
'water_service_provider' => $request->input('water_service_provider'),
'type_of_water_facility' => $request->input('type_of_water_facility'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updateWaterSystem(Request $request)
{
$update = WaterSystem::where('id', $request->input('id'))
->update([
'municipality' => $request->input('municipality'),
'water_service_provider' => $request->input('water_service_provider'),
'type_of_water_facility' => $request->input('type_of_water_facility'),
'year' => $request->input('year'),
]);
return 'success';
}
public function getCommunication(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createCommunication($year);
}
public function createCommunication($year)
{
$check = Communication::where('year', $year)->get();
if(count($check) <= 0)
{
$municipality = Municipality::get();
foreach ($municipality as $value) {
$create = Communication::create([
'municipality' => $value->id,
'smart_communication' => 0,
'globe_telecom' => 0,
'year' => $year,
]);
}
}
return Communication::where('year', $year)
->join('municipalities','municipalities.id','=','communications.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','communications.*','municipalities.district')
->get();
}
public function updateCommunication(Request $request)
{
$arr = $request->input('data');
foreach ($arr as $value) {
$update = Communication::where('id', $value['id'])
->update([
'smart_communication' => $value["smart_communication"],
'globe_telecom' => $value["globe_telecom"],
]);
}
return 'success';
}
public function getStatusOfPower(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createStatusOfPower($year);
}
public function createStatusOfPower($year)
{
$check = StatusOfPower::where('year', $year)->get();
if(count($check) <= 0)
{
$municipality = Municipality::get();
foreach ($municipality as $value) {
$create = StatusOfPower::create([
'municipality' => $value->id,
'barangay_covered' => 0,
'barangay_energized' => 0,
'sitios_energized' => 0,
'sitios_unerginized' => 0,
'house_connections' => 0,
'members_approved' => 0,
'year' => $year,
]);
}
}
return StatusOfPower::where('year', $year)
->join('municipalities','municipalities.id','=','status_of_powers.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','status_of_powers.*','municipalities.district')
->get();
}
public function updateStatusOfPower(Request $request)
{
$arr = $request->input('data');
foreach ($arr as $value) {
$update = StatusOfPower::where('id', $value['id'])
->update([
'barangay_covered' => $value["barangay_covered"],
'barangay_energized' => $value["barangay_energized"],
'sitios_energized' => $value["sitios_energized"],
'sitios_unerginized' => $value["sitios_unerginized"],
'house_connections' => $value["house_connections"],
'members_approved' => $value["members_approved"],
]);
}
return 'success';
}
public function getEnergizedUnergizedBarangay(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createEnergizedUnergizedBarangay($year);
}
public function createEnergizedUnergizedBarangay($year)
{
$check = EnergizedUnergizedBarangay::where('year', $year)->get();
if(count($check) <= 0)
{
$municipality = Municipality::get();
foreach ($municipality as $value) {
$create = EnergizedUnergizedBarangay::create([
'municipality' => $value->id,
'potential' => 0,
'energized' => 0,
'year' => $year,
]);
}
}
return EnergizedUnergizedBarangay::where('year', $year)
->join('municipalities','municipalities.id','=','energized_unergized_barangays.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','energized_unergized_barangays.*','municipalities.district')
->get();
}
public function updateEnergizedUnergizedBarangay(Request $request)
{
$arr = $request->input('data');
foreach ($arr as $value) {
$update = EnergizedUnergizedBarangay::where('id', $value['id'])
->update([
'municipality' => $value["municipality"],
'potential' => $value["potential"],
'energized' => $value["energized"],
]);
}
return 'success';
}
public function getHouseholdPopulationBySourceOfDrinkingWater(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return HouseholdPopulationBySourceOfDrinkingWater::where('year', $year)->get();
}
public function createHouseholdPopulationBySourceOfDrinkingWater(Request $request)
{
$create = HouseholdPopulationBySourceOfDrinkingWater::create([
'source_of_drinking' => $request->input('source_of_drinking'),
'magnitude' => $request->input('magnitude'),
'proportion' => $request->input('proportion'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updateHouseholdPopulationBySourceOfDrinkingWater(Request $request)
{
$update = HouseholdPopulationBySourceOfDrinkingWater::where('id', $request->input('id'))
->update([
'source_of_drinking' => $request->input('source_of_drinking'),
'magnitude' => $request->input('magnitude'),
'proportion' => $request->input('proportion'),
]);
return 'success';
}
public function deleteHouseholdPopulationBySourceOfDrinkingWater(Request $request)
{
$delete = HouseholdPopulationBySourceOfDrinkingWater::where('id', $request->input('id'))
->delete();
return 'success';
}
public function getPresentSourceOfPower(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return PresentSourceOfPower::where('year', $year)
->join('municipalities','municipalities.id','=','present_source_of_powers.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','present_source_of_powers.*','municipalities.district')->get();
}
public function createPresentSourceOfPower(Request $request)
{
$create = PresentSourceOfPower::create([
'power_plant' => $request->input('power_plant'),
'installed_capacity' => $request->input('installed_capacity'),
'dependable_capacity' => $request->input('dependable_capacity'),
'municipality' => $request->input('municipality'),
'year' => $request->input('year')
]);
return 'success';
}
public function updatePresentSourceOfPower(Request $request)
{
$update = PresentSourceOfPower::where('id', $request->input('id'))
->update([
'power_plant' => $request->input('power_plant'),
'installed_capacity' => $request->input('installed_capacity'),
'dependable_capacity' => $request->input('dependable_capacity'),
'municipality' => $request->input('municipality'),
'year' => $request->input('year'),
]);
return 'success';
}
public function deletePresentSourceOfPower(Request $request)
{
$delete = PresentSourceOfPower::where('id', $request->input('id'))->delete();
return 'success';
}
public function getPowerSubstation(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return PowerSubstation::where('year', $year)->get();
}
public function createPowerSubstation(Request $request)
{
$create = PowerSubstation::create([
'power_substation' => $request->input('power_substation'),
'megavolt_amperes' => $request->input('megavolt_amperes'),
'ownership' => $request->input('ownership'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updatePowerSubstation(Request $request)
{
$create = PowerSubstation::where('id', $request->input('id'))
->update([
'power_substation' => $request->input('power_substation'),
'megavolt_amperes' => $request->input('megavolt_amperes'),
'ownership' => $request->input('ownership'),
]);
return 'success';
}
public function deletePowerSubstation(Request $request)
{
$delete = PowerSubstation::where('id', $request->input('id'))->delete();
return 'success';
}
public function getSatelliteTelevisionNetwork()
{
return SatelliteTelevisionNetwork::get();
}
public function createSatelliteTelevisionNetwork(Request $request)
{
$create = SatelliteTelevisionNetwork::create([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updateSatelliteTelevisionNetwork(Request $request)
{
$update = SatelliteTelevisionNetwork::where('id', $request->input('id'))
->update([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function deleteSatelliteTelevisionNetwork(Request $request)
{
$delete = SatelliteTelevisionNetwork::where('id', $request->input('id'))->delete();
return 'success';
}
public function getRadioStation()
{
return RadioStation::get();
}
public function createRadioStation(Request $request)
{
$create = RadioStation::create([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updateRadioStation(Request $request)
{
$update = RadioStation::where('id', $request->input('id'))
->update([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function deleteRadioStation(Request $request)
{
$delete = RadioStation::where('id', $request->input('id'))->delete();
return 'success';
}
public function getCableTelevisionNetwork()
{
return CableTelevisionNetwork::get();
}
public function createCableTelevisionNetwork(Request $request)
{
$create = CableTelevisionNetwork::create([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function updateCableTelevisionNetwork(Request $request)
{
$update = CableTelevisionNetwork::where('id', $request->input('id'))
->update([
'name' => $request->input('name'),
'year' => $request->input('year'),
]);
return 'success';
}
public function deleteCableTelevisionNetwork(Request $request)
{
$delete = CableTelevisionNetwork::where('id', $request->input('id'))->delete();
return 'success';
}
public function getLicensePermitIssued(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createLicensePermitIssued($year);
}
public function createLicensePermitIssued($year)
{
$check = LicensePermitIssued::where('year', $year)->get();
if(count($check) <=0)
{
LicensePermitIssued::create([
'year' => $year
]);
}
return LicensePermitIssued::where('year', $year)->first();
}
public function updateLicensePermitIssued(Request $request)
{
$update = LicensePermitIssued::where('id', $request->input('id'))
->update([
'pro_new_first_sem' => $request->input('pro_new_first_sem'),
'pro_new_second_sem' => $request->input('pro_new_second_sem'),
'pro_renew_first_sem' => $request->input('pro_renew_first_sem'),
'pro_renew_second_sem' => $request->input('pro_renew_second_sem'),
'non_pro_new_first_sem' => $request->input('non_pro_new_first_sem'),
'non_pro_new_second_sem' => $request->input('non_pro_new_second_sem'),
'non_pro_renew_first_sem' => $request->input('non_pro_renew_first_sem'),
'non_pro_renew_second_sem' => $request->input('non_pro_renew_second_sem'),
'student_first_sem' => $request->input('student_first_sem'),
'student_second_sem' => $request->input('student_second_sem'),
]);
return 'success';
}
}
<file_sep>/app/UrbanRuralPopulation.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Per-municipality urban/rural barangay counts and populations for two census
 * years, linked to a parent info record via urban_rural_population_infos_id.
 */
class UrbanRuralPopulation extends Model
{
    // Columns open to mass assignment (create()/update() with arrays).
    protected $fillable = [
        'urban_rural_population_infos_id',
        'municipality',
        'no_of_barangays_urban',
        'no_of_barangays_rural',
        'population_urban_year_1',
        'population_rural_year_1',
        'population_urban_year_2',
        'population_rural_year_2',
    ];
}
<file_sep>/database/migrations/2018_11_27_004207_create_fire_preventions_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for `fire_preventions`: per-municipality, per-year counts of fire
 * stations, fire trucks and personnel. The bfp/lgu/ngo suffixes presumably
 * denote the owning agency — confirm with domain owners.
 */
class CreateFirePreventionsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('fire_preventions', function (Blueprint $table) {
            $table->increments('id');
            // NOTE(review): municipality is a string here, while sibling tables
            // store an integer municipality id — verify this is intentional.
            $table->string('municipality');
            $table->integer('no_of_fire_station_bfp')->default(0);
            $table->integer('no_of_fire_station_lgu')->default(0);
            $table->integer('no_of_fire_trucks_bfp')->default(0);
            $table->integer('no_of_fire_trucks_lgu')->default(0);
            $table->integer('no_of_fire_trucks_ngo')->default(0);
            $table->integer('no_of_personnel')->default(0);
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('fire_preventions');
    }
}
<file_sep>/app/LandClassificationStatistic.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly land classification statistics: alienable/disposable land plus
 * classified and unclassified forest land.
 */
class LandClassificationStatistic extends Model
{
    // Columns open to mass assignment.
    protected $fillable = [
        'certified_alienable_and_disposable_land',
        'forest_land_classified',
        'forest_land_unclassified',
        'year'
    ];
}
<file_sep>/app/Http/Controllers/MunicipalityController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Municipality;
use App\Barangay;
/**
 * Read-only lookups for municipalities and their barangays.
 */
class MunicipalityController extends Controller
{
    /** Municipalities with their related watershed data. */
    public function watershed()
    {
        return Municipality::with_watershed();
    }

    /** Every municipality, in storage order. */
    public function get()
    {
        return Municipality::get();
    }

    /** Municipalities sorted alphabetically by name. */
    public function getMunicipalityWithPSGC()
    {
        return Municipality::orderBy('municipality', 'ASC')->get();
    }

    /** Barangays belonging to the municipality id supplied in the request. */
    public function getBarangay(Request $request)
    {
        return Barangay::where('municipality_id', $request->input('id'))->get();
    }

    /** Every barangay, unfiltered. */
    public function getBarangayWithPSGC()
    {
        return Barangay::get();
    }
}
<file_sep>/database/migrations/2018_12_05_090328_create_actual_projecteds_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for `actual_projecteds`: per-municipality population (pop) and
 * household (hh) figures for a base year, an actual year and a projected year,
 * linked to a parent info record via actual_projected_infos_id.
 */
class CreateActualProjectedsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('actual_projecteds', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('actual_projected_infos_id')->default(0);
            $table->integer('municipality')->default(0);
            $table->integer('old_year_pop')->default(0);
            $table->integer('old_year_hh')->default(0);
            $table->integer('actual_year_pop')->default(0);
            $table->integer('actual_year_hh')->default(0);
            $table->integer('projected_year_pop')->default(0);
            $table->integer('projected_year_hh')->default(0);
            // NOTE(review): growth rates stored as integers — fractional rates
            // would be truncated; confirm whether a decimal column was intended.
            $table->integer('growth_rate_1')->default(0);
            $table->integer('growth_rate_2')->default(0);
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('actual_projecteds');
    }
}
<file_sep>/app/PopulationByCensusYear.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * One municipality's population count for a given census year.
 */
class PopulationByCensusYear extends Model
{
    // Columns open to mass assignment.
    protected $fillable = [
        'municipality',
        'population',
        'year',
    ];
}
<file_sep>/resources/assets/js/vue/components/facts_and_figure/index.js
import Vue from 'vue';
import general_information from './general_information/general_information.vue'
Vue.component("general_information", general_information)
import print from './print/print.vue'
Vue.component("print", print)
import land_area_by_municipality from './land_and_other_natural_resources/land_area_by_municipality.vue'
import land_classification_statistic from './land_and_other_natural_resources/land_classification_statistic.vue'
import geographical_zone_surface from './land_and_other_natural_resources/geographical_zone_surface.vue'
Vue.component("land_area_by_municipality", land_area_by_municipality)
Vue.component("geographical_zone_surface", geographical_zone_surface)
Vue.component("land_classification_statistic", land_classification_statistic)
import forest_cover from './land_and_other_natural_resources/forest_cover.vue'
Vue.component("forest_cover", forest_cover)
import major_watershed from './land_and_other_natural_resources/major_watershed.vue'
Vue.component("major_watershed", major_watershed)
import marine_protected_areas from './land_and_other_natural_resources/marine_protected_areas.vue'
Vue.component("marine_protected_areas", marine_protected_areas)
import established_marine_protected_areas from './land_and_other_natural_resources/established_marine_protected_areas.vue'
Vue.component("established_marine_protected_areas", established_marine_protected_areas)
import rice_area_city from './economic_profile/rice_area_city.vue'
Vue.component("rice_area_city", rice_area_city)
import irrigated_area from './economic_profile/irrigated_area.vue'
Vue.component("irrigated_area", irrigated_area)
import corn_production from './economic_profile/corn_production.vue'
Vue.component("corn_production", corn_production)
import vegetable_production from './economic_profile/vegetable_production.vue'
Vue.component("vegetable_production", vegetable_production)
import high_value_commercial_crops from './economic_profile/high_value_commercial_crops.vue'
Vue.component("high_value_commercial_crops", high_value_commercial_crops)
import accommodation_establishments from './economic_profile/accommodation_establishments.vue'
Vue.component("accommodation_establishments", accommodation_establishments)
import tourist_sites_and_destinations from './economic_profile/tourist_sites_and_destinations.vue'
Vue.component("tourist_sites_and_destinations", tourist_sites_and_destinations)
import commerce_and_industry from './economic_profile/commerce_and_industry.vue'
Vue.component("commerce_and_industry", commerce_and_industry)
import top_five_products from './economic_profile/top_five_products.vue'
Vue.component("top_five_products", top_five_products)
import top_five_fruit_areas from './economic_profile/top_five_fruit_areas.vue'
Vue.component("top_five_fruit_areas", top_five_fruit_areas)
import banks_and_atms from './economic_profile/banks_and_atms.vue'
Vue.component("banks_and_atms", banks_and_atms)
import registered_vechicle from './utilities_and_infrustructure/registered_vechicle.vue'
Vue.component("registered_vechicle", registered_vechicle)
import license_permit_issueds from './utilities_and_infrustructure/license_permit_issueds.vue'
Vue.component("license_permit_issueds", license_permit_issueds)
import communication from './utilities_and_infrustructure/communication.vue'
Vue.component("communication", communication)
import water_system from './utilities_and_infrustructure/water_system.vue'
Vue.component("water_system", water_system)
import household_population_by_source_of_drinking_waters from './utilities_and_infrustructure/household_population_by_source_of_drinking_waters.vue'
Vue.component("household_population_by_source_of_drinking_waters", household_population_by_source_of_drinking_waters)
import status_of_power from './utilities_and_infrustructure/status_of_power.vue'
Vue.component("status_of_power", status_of_power)
import energized_and_unenergized_barangay_sitios from './utilities_and_infrustructure/energized_and_unenergized_barangay_sitios.vue'
Vue.component("energized_and_unenergized_barangay_sitios", energized_and_unenergized_barangay_sitios)
import present_source_of_powers from './utilities_and_infrustructure/present_source_of_powers.vue'
Vue.component("present_source_of_powers", present_source_of_powers)
import power_substations from './utilities_and_infrustructure/power_substations.vue'
Vue.component("power_substations", power_substations)
import annual_income_budget from './financial_profile/annual_income_budget.vue'
Vue.component("annual_income_budget", annual_income_budget)
import income_and_expenditure from './financial_profile/income_and_expenditure.vue'
Vue.component("income_and_expenditure", income_and_expenditure)
import personnel_by_status from './institutional_profile/personnel_by_status.vue'
Vue.component("personnel_by_status", personnel_by_status)
import personnel_by_office from './institutional_profile/personnel_by_office.vue'
Vue.component("personnel_by_office", personnel_by_office)
// SOCIAL DEVELOPMENT
import dimensions_of_poverties from './social_development_profile/dimensions_of_poverties.vue'
Vue.component("dimensions_of_poverties", dimensions_of_poverties)
import health_morbidities from './social_development_profile/health_morbidities.vue'
Vue.component("health_morbidities", health_morbidities)
import health_infant_morbidities from './social_development_profile/health_infant_morbidities.vue'
Vue.component("health_infant_morbidities", health_infant_morbidities)
import health_mortalities from './social_development_profile/health_mortalities.vue'
Vue.component("health_mortalities", health_mortalities)
import health_infant_mortalities from './social_development_profile/health_infant_mortalities.vue'
Vue.component("health_infant_mortalities", health_infant_mortalities)
import nutritional_status from './social_development_profile/nutritional_status.vue'
Vue.component("nutritional_status", nutritional_status)
import health_facilities from './social_development_profile/health_facilities.vue'
Vue.component("health_facilities", health_facilities)
import doh_licensed_health_facilities from './social_development_profile/doh_licensed_health_facilities.vue'
Vue.component("doh_licensed_health_facilities", doh_licensed_health_facilities)
import education_facilities from './social_development_profile/education_facilities.vue'
Vue.component("education_facilities", education_facilities)
import enrollment_in_government_and_private_schools from './social_development_profile/enrollment_in_government_and_private_schools.vue'
Vue.component("enrollment_in_government_and_private_schools", enrollment_in_government_and_private_schools)
import performance_indicator_in_public_schools from './social_development_profile/performance_indicator_in_public_schools.vue'
Vue.component("performance_indicator_in_public_schools", performance_indicator_in_public_schools)
import social_welfare_services from './social_development_profile/social_welfare_services.vue'
Vue.component("social_welfare_services", social_welfare_services)
import senior_citizen_and_person_with_disabilities from './social_development_profile/senior_citizen_and_person_with_disabilities.vue'
Vue.component("senior_citizen_and_person_with_disabilities", senior_citizen_and_person_with_disabilities)
import crime_statistics_and_protective_facilities from './social_development_profile/crime_statistics_and_protective_facilities.vue'
Vue.component("crime_statistics_and_protective_facilities", crime_statistics_and_protective_facilities)
import fire_preventions from './social_development_profile/fire_preventions.vue'
Vue.component("fire_preventions", fire_preventions)
import actual_projecteds from './demograph/actual_projecteds.vue'
Vue.component("actual_projecteds", actual_projecteds)
import ancestral_domain from './demograph/ancestral_domain.vue'
Vue.component("ancestral_domain", ancestral_domain)
import household_population_by_sexes from './demograph/household_population_by_sexes.vue'
Vue.component("household_population_by_sexes", household_population_by_sexes)
import mangyan_tribes from './demograph/mangyan_tribes.vue'
Vue.component("mangyan_tribes", mangyan_tribes)
import population_by_census_year from './demograph/population_by_census_year.vue'
Vue.component("population_by_census_year", population_by_census_year)
import population_densities from './demograph/population_densities.vue'
Vue.component("population_densities", population_densities)
import projected_populations from './demograph/projected_populations.vue'
Vue.component("projected_populations", projected_populations)
import urban_rural_populations from './demograph/urban_rural_populations.vue'
Vue.component("urban_rural_populations", urban_rural_populations)<file_sep>/resources/assets/js/vue/store/index.js
import Vue from 'vue'
import Vuex from 'vuex'
import axios from 'axios'
import router from '../routes'
Vue.use(Vuex);
// Central Vuex store: cached user profile, selectable year list, access flags.
const store = new Vuex.Store({
    state: {
        USER_INFO: [],
        YEAR: [],
        ACCESS: [],
    },
    mutations: {
        // Replace the cached user profile wholesale.
        SET_USER_INFORMATION(state, info) {
            state.USER_INFO = info;
        },
        // Populate YEAR with 1969 .. (current year + 49), newest first.
        GET_YEAR(state) {
            const upperBound = new Date().getFullYear() + 50;
            const firstYear = 1969;
            const years = [];
            for (let y = firstYear; y < upperBound; y++) {
                years.push(y);
            }
            state.YEAR = years.reverse();
        }
    },
    actions: {
    },
    getters: {
    }
})
export default store;<file_sep>/app/RiceAreaCity.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Per-municipality rice area figures (irrigated, rainfed, total) for a year.
 */
class RiceAreaCity extends Model
{
    // Columns open to mass assignment.
    protected $fillable = [
        "municipality",
        "irrigated_area",
        "rainfed_area",
        "total_rice_area",
        "year",
    ];
}
<file_sep>/app/IrrigatedArea.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Per-municipality irrigated area for a year, broken down by system type
 * (ris/cis/pump abbreviations come from the schema — confirm expansion).
 */
class IrrigatedArea extends Model
{
    // Columns open to mass assignment.
    protected $fillable = [
        'municipality',
        'ris',
        'cis',
        'pump',
        'total',
        'year',
    ];
}
<file_sep>/app/Http/Controllers/EconomicProfileController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\RiceAreaCity;
use App\Municipality;
use App\IrrigatedArea;
use App\RiceProduction;
use App\CornProduction;
use App\VegetableProduction;
use App\CoconutProduction;
use App\HighValueCommercialCrop;
use App\AccommodationEstablishment;
use App\TouristSitesAndDestination;
use App\BanksAndAtm;
use App\CommerceAndIndustry;
use App\TopFiveProduct;
use App\TopFiveFruitArea;
class EconomicProfileController extends Controller
{
public function getRiceAreaCity(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createRiceAreaCity($year);
}
public function createRiceAreaCity($year)
{
$check = RiceAreaCity::where('year', $year)->get();
if(count($check) <= 0)
{
$municipality = Municipality::get();
foreach ($municipality as $value) {
$create = RiceAreaCity::create([
'municipality' => $value->id,
'irrigated_area' => 0,
'rainfed_area' => 0,
'total_rice_area' => 0,
'year' => $year,
]);
}
}
return RiceAreaCity::where('year', $year)
->join('municipalities','municipalities.id','=','rice_area_cities.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','rice_area_cities.*','municipalities.district')
->get();
}
public function updateRiceAreaCity(Request $request)
{
$arr = $request->input('data');
foreach ($arr as $value) {
$update = RiceAreaCity::where('id', $value['id'])
->update([
'irrigated_area' => $value["irrigated_area"],
'rainfed_area' => $value["rainfed_area"],
'total_rice_area' => $value["total_rice_area"],
]);
}
return 'success';
}
public function getIrrigatedArea(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createIrrigatedArea($year);
}
public function createIrrigatedArea($year)
{
$count = IrrigatedArea::where('year', $year)->get();
if(count($count) <= 0)
{
$municipality = Municipality::get();
foreach ($municipality as $value) {
$create = IrrigatedArea::create([
'municipality' => $value->id,
'ris' => 0,
'cis' => 0,
'pump' => 0,
'total' => 0,
'year' => $year,
]);
}
}
return IrrigatedArea::where('year', $year)
->join('municipalities','municipalities.id','=','irrigated_areas.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','irrigated_areas.*','municipalities.district')
->get();
}
public function updateIrrigatedArea(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = IrrigatedArea::where('id', $value["id"])
->update([
'ris' => $value["ris"],
'cis' => $value["cis"],
'pump' => $value["pump"],
'total' => $value["total"],
]);
}
return 'success';
}
public function getRiceProduction(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createRiceProduction($year);
}
public function createRiceProduction($year)
{
$check = RiceProduction::where('year', $year)->get();
if(count($check) <= 0)
{
foreach (Municipality::get() as $value) {
RiceProduction::create([
'municipality' => $value->id,
'no_of_farmers' => 0,
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
}
}
return RiceProduction::where('year', $year)
->join('municipalities','municipalities.id','=','rice_productions.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','rice_productions.*','municipalities.district')
->get();
}
public function updateRiceProduction(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
RiceProduction::where('id', $value["id"])
->update([
'municipality' => $value["municipality"],
'no_of_farmers' => $value["no_of_farmers"],
'area_harvested' => $value["area_harvested"],
'production_mt' => $value["production_mt"],
'arerage_yield' => $value["arerage_yield"],
]);
}
return 'success';
}
public function getCornProduction(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createCornProduction($year);
}
public function createCornProduction($year)
{
$check = CornProduction::where('year', $year)->get();
if(count($check) <= 0)
{
foreach (Municipality::get() as $value) {
CornProduction::create([
'municipality' => $value->id,
'no_of_farmers' => 0,
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
}
}
return CornProduction::where('year', $year)
->join('municipalities','municipalities.id','=','corn_productions.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','corn_productions.*','municipalities.district')
->get();
}
public function updateCornProduction(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
CornProduction::where('id', $value["id"])
->update([
'municipality' => $value["municipality"],
'no_of_farmers' => $value["no_of_farmers"],
'area_harvested' => $value["area_harvested"],
'production_mt' => $value["production_mt"],
'arerage_yield' => $value["arerage_yield"],
]);
}
return 'success';
}
public function getVegetableProduction(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createVegetableProduction($year);
}
public function createVegetableProduction($year)
{
$check = VegetableProduction::where('year', $year)->get();
if(count($check) <= 0)
{
VegetableProduction::create([
'commodity' => "Root Crops",
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
VegetableProduction::create([
'commodity' => "Fruit Vegetables",
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
VegetableProduction::create([
'commodity' => "Leafy Vegetables",
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
VegetableProduction::create([
'commodity' => "Legumes",
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
VegetableProduction::create([
'commodity' => "Condiments",
'area_harvested' => 0,
'production_mt' => 0,
'arerage_yield' => 0,
'year' => $year,
]);
}
return VegetableProduction::where('year', $year)
->get();
}
public function updateVegetableProduction(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
VegetableProduction::where('id', $value["id"])
->update([
'area_harvested' => $value["area_harvested"],
'production_mt' => $value["production_mt"],
'arerage_yield' => $value["arerage_yield"],
]);
}
return 'success';
}
public function getCoconutProduction(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createCoconutProduction($year);
}
public function createCoconutProduction($year)
{
$check = CoconutProduction::where('year', $year)->get();
if(count($check) <= 0)
{
foreach (Municipality::get() as $value) {
CoconutProduction::create([
'municipality' => $value->id,
'no_of_coconut_farmers' => 0,
'coconut_area' => 0,
'no_of_coco_trees' => 0,
'non_bearing' => 0,
'bearing' => 0,
'nut_tree_year' => 0,
'wholenuts' => 0,
'copra_equivalent' => 0,
'year' => $year,
]);
}
}
return CoconutProduction::where('year', $year)
->join('municipalities','municipalities.id','=','coconut_productions.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','coconut_productions.*','municipalities.district')
->get();
}
public function updateCoconutProduction(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
CoconutProduction::where('id', $value["id"])
->update([
'no_of_coconut_farmers' => $value['no_of_coconut_farmers'],
'coconut_area' => $value['coconut_area'],
'no_of_coco_trees' => $value['no_of_coco_trees'],
'non_bearing' => $value['non_bearing'],
'bearing' => $value['bearing'],
'nut_tree_year' => $value['nut_tree_year'],
'wholenuts' => $value['wholenuts'],
'copra_equivalent' => $value['copra_equivalent'],
]);
}
return 'success';
}
public function getHighValueCommercialCrop(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return HighValueCommercialCrop::where('year', $year)->get();
}
public function createHighValueCommercialCrop(Request $request)
{
$validate = $request->validate([
'commodity' => 'required',
'year' => 'required',
]);
$create = HighValueCommercialCrop::create([
'commodity' => $request->input('commodity'),
'year' => $request->input('year')
]);
}
public function updateHighValueCommercialCrop(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$crops = HighValueCommercialCrop::where('id', $value["id"])
->update([
'commodity' => $value["commodity"],
'farmers_served' => $value["farmers_served"],
'no_of_trees_planted' => $value["no_of_trees_planted"],
'planted_area' => $value["planted_area"],
'production' => $value["production"],
'average_yield' => $value["average_yield"],
]);
}
return 'success';
}
public function deleteHighValueCommercialCrop(Request $request)
{
$validate = $request->validate([
'id' => 'required'
]);
$delete = HighValueCommercialCrop::where('id', $request->input('id'))->delete();
}
// ACCOMODATION
public function getAccommodationEstablishment(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createAccommodationEstablishment($year);
}
public function createAccommodationEstablishment($year)
{
$check = AccommodationEstablishment::where('year', $year)->get();
if(count($check) <= 0)
{
foreach (Municipality::get() as $value) {
AccommodationEstablishment::create([
'municipality' => $value->id,
'no_of_accommodation_establishment' => 0,
'no_of_room' => 0,
'bedding_capacity' => 0,
'total_employment' => 0,
'year' => $year,
]);
}
}
return AccommodationEstablishment::where('year', $year)
->join('municipalities','municipalities.id','=','accommodation_establishments.municipality')
->select('municipalities.id as municipality','municipalities.municipality as municipality_name','accommodation_establishments.*','municipalities.district')
->get();
}
/**
 * Batch-update accommodation establishment metrics.
 *
 * @param Request $request carries 'data' => array of row payloads keyed by 'id'
 * @return string 'success'
 */
public function updateAccommodationEstablishment(Request $request)
{
    foreach ($request->input('data') as $row) {
        AccommodationEstablishment::where('id', $row["id"])
            ->update([
                'no_of_accommodation_establishment' => $row["no_of_accommodation_establishment"],
                'no_of_room' => $row["no_of_room"],
                'bedding_capacity' => $row["bedding_capacity"],
                'total_employment' => $row["total_employment"],
            ]);
    }
    return 'success';
}
/**
 * Group every tourist site/destination row by municipality name.
 *
 * @return array municipality_name => list of {id, municipality, destination}
 */
public function getTouristSitesAndDestination(Request $request)
{
    $rows = TouristSitesAndDestination::join('municipalities', 'municipalities.id', '=', 'tourist_sites_and_destinations.municipality')
        ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'tourist_sites_and_destinations.*', 'municipalities.district')
        ->get();
    $grouped = [];
    foreach ($rows as $row) {
        $grouped[$row->municipality_name][] = [
            'id' => $row->id,
            'municipality' => $row->municipality,
            'destination' => $row->destination,
        ];
    }
    return $grouped;
}
/**
 * Create one tourist site/destination row.
 *
 * @param Request $request must carry 'municipality' and 'destination'
 * @return string 'success'
 */
public function createTouristSitesAndDestination(Request $request)
{
    $request->validate([
        'municipality' => 'required',
        'destination' => 'required',
    ]);
    TouristSitesAndDestination::create([
        'municipality' => $request->input('municipality'),
        'destination' => $request->input('destination'),
    ]);
    return 'success';
}
/**
 * Update a tourist site/destination row.
 *
 * Fix: also require 'id' — the row selector used by the update — which
 * was previously unvalidated (the sibling delete endpoint validates it).
 *
 * @param Request $request must carry 'id', 'municipality', 'destination'
 * @return string 'success'
 */
public function updateTouristSitesAndDestination(Request $request)
{
    $request->validate([
        'id' => 'required',
        'municipality' => 'required',
        'destination' => 'required',
    ]);
    TouristSitesAndDestination::where('id', $request->input('id'))
        ->update([
            'municipality' => $request->input('municipality'),
            'destination' => $request->input('destination'),
        ]);
    return 'success';
}
/**
 * Delete a tourist site/destination row by id.
 *
 * @param Request $request must carry 'id'
 * @return string 'success'
 */
public function deleteTouristSitesAndDestination(Request $request)
{
    $request->validate([
        'id' => 'required',
    ]);
    TouristSitesAndDestination::where('id', $request->input('id'))->delete();
    return 'success';
}
/**
 * Return bank/ATM counts for the requested year, defaulting to the
 * current ISO-8601 week-numbering year (date('o')). Delegates to
 * createBanksAndAtm(), which seeds zero rows on first access.
 */
public function getBanksAndAtm(Request $request)
{
    $year = $request->has('year') ? $request->input('year') : date('o');
    return $this->createBanksAndAtm($year);
}
/**
 * Seed zero-valued bank/ATM rows (one per municipality) when the given
 * year has none yet, then return that year's rows joined with the
 * municipality name and district.
 *
 * @param mixed $year year key to look up / seed
 */
public function createBanksAndAtm($year)
{
    $existing = BanksAndAtm::where('year', $year)->get();
    if (count($existing) <= 0) {
        // First access for this year: create a blank row per municipality.
        foreach (Municipality::get() as $town) {
            BanksAndAtm::create([
                'municipality' => $town->id,
                'banks' => 0,
                'atm' => 0,
                'year' => $year,
            ]);
        }
    }
    return BanksAndAtm::where('year', $year)
        ->join('municipalities', 'municipalities.id', '=', 'banks_and_atms.municipality')
        ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'banks_and_atms.*', 'municipalities.district')
        ->get();
}
/**
 * Batch-update bank/ATM counts.
 *
 * @param Request $request carries 'data' => array of row payloads keyed by 'id'
 * @return string 'success'
 */
public function updateBanksAndAtm(Request $request)
{
    foreach ($request->input('data') as $row) {
        BanksAndAtm::where('id', $row["id"])
            ->update([
                'banks' => $row["banks"],
                'atm' => $row["atm"],
            ]);
    }
    return 'success';
}
/**
 * Return commerce-and-industry figures for the requested year,
 * defaulting to the current ISO-8601 week-numbering year (date('o')).
 * Delegates to createCommerceAndIndustry(), which seeds the sector
 * rows on first access.
 */
public function getCommerceAndIndustry(Request $request)
{
    $year = $request->has('year') ? $request->input('year') : date('o');
    return $this->createCommerceAndIndustry($year);
}
/**
 * Seed the five fixed sectoral-classification rows with zero metrics
 * when the given year has none yet, then return the year's rows.
 *
 * @param mixed $year year key to look up / seed
 */
public function createCommerceAndIndustry($year)
{
    $sectors = ['Services','Trading','Tourism','Manufacturing','Agriculture'];
    $existing = CommerceAndIndustry::where('year', $year)->get();
    if (count($existing) <= 0) {
        foreach ($sectors as $sector) {
            CommerceAndIndustry::create([
                'sectoral_classification' => $sector,
                'no_of_applications' => 0,
                'investments' => 0,
                'employment' => 0,
                'year' => $year,
            ]);
        }
    }
    return CommerceAndIndustry::where('year', $year)->get();
}
/**
 * Batch-update commerce-and-industry sector rows.
 *
 * @param Request $request carries 'data' => array of row payloads keyed by 'id'
 * @return string 'success'
 */
public function updateCommerceAndIndustry(Request $request)
{
    foreach ($request->input('data') as $row) {
        CommerceAndIndustry::where('id', $row["id"])
            ->update([
                'sectoral_classification' => $row["sectoral_classification"],
                'no_of_applications' => $row["no_of_applications"],
                'investments' => $row["investments"],
                'employment' => $row["employment"],
            ]);
    }
    return 'success';
}
/**
 * Pivot the top-five product rows for a year into the shape the UI
 * expects:
 *   'year'         => the resolved year,
 *   'product_name' => map of product => [] (distinct product names),
 *   'data'         => map of municipality_name => product => row fields.
 *
 * Year defaults to the current ISO-8601 week-numbering year (date('o')).
 */
public function getTopFiveProduct(Request $request)
{
    $year = $request->has('year') ? $request->input('year') : date('o');
    $rows = TopFiveProduct::where('year', $year)
        ->join('municipalities', 'municipalities.id', '=', 'top_five_products.municipality')
        ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'top_five_products.*', 'municipalities.district')
        ->get();
    $result = [];
    $result["data"] = [];
    $result["product_name"] = [];
    $result["year"] = $year;
    foreach ($rows as $row) {
        $result["product_name"][$row->product] = [];
        $result["data"][$row->municipality_name][$row->product] = [
            'id' => $row->id,
            'product' => $row->product,
            'quantity' => $row->quantity,
            'year' => $row->year,
        ];
    }
    return $result;
}
/**
 * Register a new top-five product: one zero-quantity row per
 * municipality for the given year (quantities are filled in later via
 * updateTopFiveProduct()).
 *
 * Fix: validate the required inputs up front, consistent with the other
 * create endpoints in this controller; they were previously unvalidated.
 *
 * @param Request $request must carry 'product' and 'year'
 * @return string 'success'
 */
public function createTopFiveProduct(Request $request)
{
    $request->validate([
        'product' => 'required',
        'year' => 'required',
    ]);
    foreach (Municipality::get() as $value) {
        TopFiveProduct::create([
            'municipality' => $value->id,
            'product' => $request->input('product'),
            'quantity' => 0,
            'year' => $request->input('year'),
        ]);
    }
    return 'success';
}
/**
 * Batch-update product quantities. The posted 'data' mirrors the shape
 * produced by getTopFiveProduct(): municipality => product => row.
 *
 * @return string 'success'
 */
public function updateTopFiveProduct(Request $request)
{
    foreach ($request->input('data') as $products) {
        foreach ($products as $row) {
            TopFiveProduct::where('id', $row["id"])
                ->update([
                    'quantity' => $row["quantity"],
                ]);
        }
    }
    return 'success';
}
/**
 * Remove a product's rows (all municipalities) for a given year.
 *
 * Fix: validate the two inputs the delete is keyed on; previously a
 * request missing either field was accepted silently.
 *
 * @param Request $request must carry 'product' and 'year'
 * @return string 'success'
 */
public function deleteTopFiveProduct(Request $request)
{
    $request->validate([
        'product' => 'required',
        'year' => 'required',
    ]);
    TopFiveProduct::where([
        'product' => $request->input('product'),
        'year' => $request->input('year'),
    ])->delete();
    return 'success';
}
/**
 * Pivot the top-five fruit-area rows for a year into the shape the UI
 * expects (same layout as getTopFiveProduct()):
 *   'year'         => the resolved year,
 *   'product_name' => map of product => [],
 *   'data'         => map of municipality_name => product => row fields.
 *
 * Year defaults to the current ISO-8601 week-numbering year (date('o')).
 */
public function getTopFiveFruitArea(Request $request)
{
    $year = $request->has('year') ? $request->input('year') : date('o');
    $rows = TopFiveFruitArea::where('year', $year)
        ->join('municipalities', 'municipalities.id', '=', 'top_five_fruit_areas.municipality')
        ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'top_five_fruit_areas.*', 'municipalities.district')
        ->get();
    $result = [];
    $result["data"] = [];
    $result["product_name"] = [];
    $result["year"] = $year;
    foreach ($rows as $row) {
        $result["product_name"][$row->product] = [];
        $result["data"][$row->municipality_name][$row->product] = [
            'id' => $row->id,
            'product' => $row->product,
            'quantity' => $row->quantity,
            'year' => $row->year,
        ];
    }
    return $result;
}
/**
 * Register a new top-five fruit area: one zero-quantity row per
 * municipality for the given year.
 *
 * Fix: validate the required inputs up front, consistent with the other
 * create endpoints in this controller; they were previously unvalidated.
 *
 * @param Request $request must carry 'product' and 'year'
 * @return string 'success'
 */
public function createTopFiveFruitArea(Request $request)
{
    $request->validate([
        'product' => 'required',
        'year' => 'required',
    ]);
    foreach (Municipality::get() as $value) {
        TopFiveFruitArea::create([
            'municipality' => $value->id,
            'product' => $request->input('product'),
            'quantity' => 0,
            'year' => $request->input('year'),
        ]);
    }
    return 'success';
}
/**
 * Batch-update fruit-area quantities. The posted 'data' mirrors the
 * shape produced by getTopFiveFruitArea(): municipality => product => row.
 *
 * @return string 'success'
 */
public function updateTopFiveFruitArea(Request $request)
{
    foreach ($request->input('data') as $products) {
        foreach ($products as $row) {
            TopFiveFruitArea::where('id', $row["id"])
                ->update([
                    'quantity' => $row["quantity"],
                ]);
        }
    }
    return 'success';
}
/**
 * Remove a fruit-area product's rows (all municipalities) for a year.
 *
 * Fix: validate the two inputs the delete is keyed on; previously a
 * request missing either field was accepted silently.
 *
 * @param Request $request must carry 'product' and 'year'
 * @return string 'success'
 */
public function deleteTopFiveFruitArea(Request $request)
{
    $request->validate([
        'product' => 'required',
        'year' => 'required',
    ]);
    TopFiveFruitArea::where([
        'product' => $request->input('product'),
        'year' => $request->input('year'),
    ])->delete();
    return 'success';
}
}
<file_sep>/app/WaterSystem.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for water system records: one row per water service
 * provider / facility type, keyed to a municipality and year. Managed
 * by the InfrastructureUtilitiesController water_system endpoints.
 */
class WaterSystem extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'municipality', // presumably an FK to municipalities.id — confirm against controller usage
'water_service_provider',
'type_of_water_facility',
'year',
];
//
}
<file_sep>/app/PopulationDensity.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for per-municipality population density figures.
 * Rows group under a parent record via population_density_infos_id.
 */
class PopulationDensity extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'population_density_infos_id', // parent record FK — TODO confirm target table
'municipality',
'gross_old',
'gross_actual',
'urban_old',
'urban_actual',
'estimated_gross',
];
}
<file_sep>/database/migrations/2018_11_13_025317_create_enrollment_in_government_and_private_schools_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for the enrollment_in_government_and_private_schools table:
 * yearly enrollment counts per educational level, split between the
 * province and Calapan, and between public/private (plus a Calapan
 * LUC/SUC column — presumably local/state universities and colleges;
 * confirm with the data source).
 */
class CreateEnrollmentInGovernmentAndPrivateSchoolsTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('enrollment_in_government_and_private_schools', function (Blueprint $table) {
$table->increments('id');
$table->string('educational_level');
// Counts default to 0 so rows can be seeded blank and edited later.
$table->integer('province_public')->default(0);
$table->integer('province_private')->default(0);
$table->integer('calapan_public')->default(0);
$table->integer('calapan_private')->default(0);
$table->integer('calapan_luc_suc')->default(0);
$table->year('year');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('enrollment_in_government_and_private_schools');
}
}
<file_sep>/app/RegisteredVehicle.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for monthly registered-vehicle counts by vehicle type.
 */
class RegisteredVehicle extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'cars',
'suv_uv',
'tricycle',
'truck',
'truck_bus',
'motorcycle',
'uv_jeep',
'month', // stored numerically (1-12); exposed as a name by the accessor below
'year',
];
/**
 * Accessor: converts the stored numeric month into its full English
 * month name ('F' format). Day 10 is an arbitrary mid-month day so the
 * built timestamp cannot roll over into an adjacent month.
 */
public function getMonthAttribute($value)
{
return date("F", mktime(0, 0, 0, $value, 10));
}
}
<file_sep>/app/ComparativeEmploymentStatus.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for yearly comparative employment status figures:
 * a count ('number') per labor/employment category per year.
 */
class ComparativeEmploymentStatus extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'labor_employment',
'number',
'year',
];
}
<file_sep>/app/MajorWatershed.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for major watershed records (name, location,
 * classification, area) tracked per year. Managed by the
 * LandAndOtherNaturalResourcesController major_watershed endpoints.
 */
class MajorWatershed extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'name',
'location',
'classification',
'area',
'year'
];
}
<file_sep>/app/ActualProjected.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for actual-vs-projected population/household figures
 * per municipality. Rows group under a parent record via
 * actual_projected_infos_id.
 */
class ActualProjected extends Model
{
// Columns open to mass assignment via ::create() / ->update().
// Suffixes: _pop = population, _hh = households — TODO confirm with data source.
protected $fillable = [
'actual_projected_infos_id', // parent record FK
'municipality',
'old_year_pop',
'old_year_hh',
'actual_year_pop',
'actual_year_hh',
'projected_year_pop',
'projected_year_hh',
'growth_rate_1',
'growth_rate_2',
];
}
<file_sep>/app/NonMetallicMineral.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for non-metallic mineral extraction per municipality
 * and year (volume in cubic meters plus an amount figure).
 */
class NonMetallicMineral extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'municipality',
'cubic_meter',
'amount',
'year',
];
}
<file_sep>/app/SeniorCitizenAndPersonWithDisability.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for senior citizen and person-with-disability counts
 * per municipality and year, split by sex.
 */
class SeniorCitizenAndPersonWithDisability extends Model
{
// Columns open to mass assignment via ::create() / ->update().
protected $fillable = [
'municipality',
'senior_male',
'senior_female',
'disability_male',
'disability_female',
'year',
];
}
<file_sep>/app/Http/Controllers/GeneralInformationController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\GeneralInformation;
/**
 * CRUD endpoints for the province's GeneralInformation record
 * (location, topography, land area, administrative counts, etc.).
 */
class GeneralInformationController extends Controller
{
// Return every GeneralInformation row.
public function get(){
return GeneralInformation::get();
}
// Returns only the first GeneralInformation row; the Request parameter
// is currently unused. NOTE(review): despite the name, no year filter
// is applied — confirm whether that is intentional.
public function getYear(Request $request)
{
return GeneralInformation::first();
}
// Currently a no-op: the former upsert-by-year implementation has been
// left commented out below, presumably deliberately disabled — confirm
// before re-enabling.
public function insert(Request $request){
// // $this->validator($request);
// $testInfo = GeneralInformation::where('year', $request->input('year'))
// ->first();
// if($testInfo)
// {
// $this->update($request);
// }else{
// $create = GeneralInformation::create([
// 'location' => $request->input('location'),
// 'topography' => $request->input('topography'),
// 'land_area' => $request->input('land_area'),
// 'boundary' => $request->input('boundary'),
// 'climate' => $request->input('climate'),
// 'capital' => $request->input('capital'),
// 'date_of_approval' => $request->input('date_of_approval'),
// 'legal_basis' => $request->input('legal_basis'),
// 'no_of_component_city' => $request->input('no_of_component_city') ?? 0,
// 'no_of_municipality' => $request->input('no_of_municipality') ?? 0,
// 'no_of_congressional_district' => $request->input('no_of_congressional_district') ?? 0,
// 'no_of_barangay' => $request->input('no_of_barangay') ?? 0,
// 'year' => $request->input('year'),
// ]);
// }
}
// Overwrite every descriptive column of the row identified by 'id'.
// NOTE(review): the validator() call is disabled, so missing fields
// are written as null — confirm whether validation should be restored.
public function update(Request $request){
// $this->validator($request);
$update = GeneralInformation::where('id', $request->input('id'))
->update([
'location' => $request->input('location'),
'topography' => $request->input('topography'),
'land_area' => $request->input('land_area'),
'boundary' => $request->input('boundary'),
'climate' => $request->input('climate'),
'capital' => $request->input('capital'),
'date_of_approval' => $request->input('date_of_approval'),
'legal_basis' => $request->input('legal_basis'),
'no_of_component_city' => $request->input('no_of_component_city'),
'no_of_municipality' => $request->input('no_of_municipality'),
'no_of_congressional_district' => $request->input('no_of_congressional_district'),
'no_of_barangay' => $request->input('no_of_barangay'),
]);
}
// Shared validation rules for insert/update; expects a Request-like
// object (anything exposing ->validate()).
public function validator($arr)
{
$arr->validate([
'location' => 'required',
'topography' => 'required',
'land_area' => 'required',
'boundary' => 'required',
'climate' => 'required',
'capital' => 'required',
'date_of_approval' => 'required',
'legal_basis' => 'required',
'no_of_component_city' => 'required',
'no_of_municipality' => 'required',
'no_of_congressional_district' => 'required',
'no_of_barangay' => 'required',
'year' => 'required',
]);
}
// Delete all GeneralInformation rows matching the given year.
public function remove(Request $request){
$request->validate([
'year' => 'required',
]);
$delete = GeneralInformation::where('year', $request->input('year'))
->delete();
}
}
<file_sep>/routes/web.php
<?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Auth::routes();
Route::get('/', function () {
return view('welcome');
})->middleware('auth');
Route::get('/error', function () {
return "success";
});
Route::post('/test/upload', 'FileController@test_upload');
Route::group(['middleware' => ['access:admin','auth']], function(){
Route::get('/user', function () {return view('welcome');});
});
Route::post('/get_addresses', 'HomeController@get_addresses');
Route::post('/generate_word', 'HomeController@generate_word');
Route::post('/uploadCBMS', 'ExcelController@uploadCBMS');
Route::get('/addBarangay', 'ExcelController@addBarangay');
// SOCIAL DEVELOPMENT
Route::post('/social_development/comparative_labor_force_participation/create', 'SocialDevelopmentController@createComparativeLaborForce');
Route::post('/social_development/comparative_labor_force_participation/get', 'SocialDevelopmentController@getComparativeLaborForce');
Route::post('/social_development/comparative_labor_force_participation/udate', 'SocialDevelopmentController@updateComparativeLaborForce');
Route::post('/social_development/comparative_employment_statuses/create', 'SocialDevelopmentController@createComparativeEmploymentStatus');
Route::post('/social_development/comparative_employment_statuses/get', 'SocialDevelopmentController@getComparativeEmploymentStatus');
Route::post('/social_development/comparative_employment_statuses/udate', 'SocialDevelopmentController@updateComparativeEmploymentStatus');
Route::post('/social_development/poverty_incidences/create', 'SocialDevelopmentController@createPovertyIncidence');
Route::post('/social_development/poverty_incidences/get', 'SocialDevelopmentController@getPovertyIncidence');
Route::post('/social_development/poverty_incidences/udate', 'SocialDevelopmentController@updatePovertyIncidence');
Route::post('/social_development/number_rate_bdimds/get', 'SocialDevelopmentController@getNumberRateBdimd');
Route::post('/social_development/number_rate_bdimds/create', 'SocialDevelopmentController@createNumberRateBdimd');
Route::post('/social_development/number_rate_bdimds/udate', 'SocialDevelopmentController@updateNumberRateBdimd');
Route::post('/social_development/doh_licensed_health_facilities/get', 'SocialDevelopmentController@getDohLicensedHealthFacility');
Route::post('/social_development/doh_licensed_health_facilities/create', 'SocialDevelopmentController@createDohLicensedHealthFacility');
Route::post('/social_development/doh_licensed_health_facilities/update', 'SocialDevelopmentController@updateDohLicensedHealthFacility');
Route::post('/social_development/doh_licensed_health_facilities/delete', 'SocialDevelopmentController@deleteDohLicensedHealthFacility');
Route::post('/social_development/nutritional_status/get', 'SocialDevelopmentController@getNutritionalStatus');
Route::post('/social_development/nutritional_status/update', 'SocialDevelopmentController@updateNutritionalStatus');
Route::post('/social_development/health_facilities/get', 'SocialDevelopmentController@getHealthFacility');
Route::post('/social_development/health_facilities/update', 'SocialDevelopmentController@updateHealthFacility');
Route::post('/social_development/health_personnels/get', 'SocialDevelopmentController@getHealthPersonnel');
Route::post('/social_development/health_personnels/update', 'SocialDevelopmentController@updateHealthPersonnel');
Route::post('/social_development/senior_citizen_and_person_with_disabilities/get', 'SocialDevelopmentController@getSeniorCitizenAndPersonWithDisability');
Route::post('/social_development/senior_citizen_and_person_with_disabilities/update', 'SocialDevelopmentController@updateSeniorCitizenAndPersonWithDisability');
Route::post('/social_development/crime_statistics_and_protective_facilities/get', 'SocialDevelopmentController@getCrimeStatisticsAndProtectiveFacility');
Route::post('/social_development/crime_statistics_and_protective_facilities/update', 'SocialDevelopmentController@updateCrimeStatisticsAndProtectiveFacility');
Route::post('/social_development/education_facilities/get', 'SocialDevelopmentController@getEducationFacilities');
Route::post('/social_development/education_facilities/create', 'SocialDevelopmentController@createEducationFacilities');
Route::post('/social_development/education_facilities/update', 'SocialDevelopmentController@updateEducationFacilities');
Route::post('/social_development/education_facilities/delete', 'SocialDevelopmentController@deleteEducationFacilities');
Route::post('/social_development/enrollment_in_government_and_private_schools/get', 'SocialDevelopmentController@getEnrollmentInGovernmentAndPrivateSchool');
Route::post('/social_development/enrollment_in_government_and_private_schools/update', 'SocialDevelopmentController@updateEnrollmentInGovernmentAndPrivateSchool');
Route::post('/social_development/performance_indicator_in_public_schools/get', 'SocialDevelopmentController@getPerformanceIndicatorInPublicSchool');
Route::post('/social_development/performance_indicator_in_public_schools/update', 'SocialDevelopmentController@updatePerformanceIndicatorInPublicSchool');
Route::post('/social_development/health_morbidities/get', 'SocialDevelopmentController@getHealthMorbidity');
Route::post('/social_development/health_morbidities/create', 'SocialDevelopmentController@createHealthMorbidity');
Route::post('/social_development/health_morbidities/update', 'SocialDevelopmentController@updateHealthMorbidity');
Route::post('/social_development/health_morbidities/delete', 'SocialDevelopmentController@deleteHealthMorbidity');
Route::post('/social_development/health_infant_morbidities/get', 'SocialDevelopmentController@getHealthInfantMorbidity');
Route::post('/social_development/health_infant_morbidities/create', 'SocialDevelopmentController@createHealthInfantMorbidity');
Route::post('/social_development/health_infant_morbidities/update', 'SocialDevelopmentController@updateHealthInfantMorbidity');
Route::post('/social_development/health_infant_morbidities/delete', 'SocialDevelopmentController@deleteHealthInfantMorbidity');
Route::post('/social_development/health_mortalities/get', 'SocialDevelopmentController@getHealthMortality');
Route::post('/social_development/health_mortalities/create', 'SocialDevelopmentController@createHealthMortality');
Route::post('/social_development/health_mortalities/update', 'SocialDevelopmentController@updateHealthMortality');
Route::post('/social_development/health_mortalities/delete', 'SocialDevelopmentController@deleteHealthMortality');
Route::post('/social_development/health_infant_mortalities/get', 'SocialDevelopmentController@getHealthInfantMortality');
Route::post('/social_development/health_infant_mortalities/create', 'SocialDevelopmentController@createHealthInfantMortality');
Route::post('/social_development/health_infant_mortalities/update', 'SocialDevelopmentController@updateHealthInfantMortality');
Route::post('/social_development/health_infant_mortalities/delete', 'SocialDevelopmentController@deleteHealthInfantMortality');
Route::post('/social_development/dimensions_of_poverties/get', 'SocialDevelopmentController@getDimensionsOfPoverty');
Route::post('/social_development/dimensions_of_poverties/update', 'SocialDevelopmentController@updateDimensionsOfPoverty');
Route::post('/social_development/fire_preventions/get', 'SocialDevelopmentController@getFirePrevention');
Route::post('/social_development/fire_preventions/update', 'SocialDevelopmentController@updateFirePrevention');
Route::post('/social_development/social_welfare_services/get', 'SocialDevelopmentController@getSocialWelfareService');
Route::post('/social_development/social_welfare_services/update', 'SocialDevelopmentController@updateSocialWelfareService');
Route::post('/auth/info', 'UserController@getAuth');
// User link
Route::post('/user/create', 'UserController@create');
Route::post('/user/list', 'UserController@getUserList');
// USER ACCESS LIST
Route::post('/user/privilege/new', 'UserController@create_new_access');
Route::post('/user/privilege/get', 'UserController@get_all_access');
Route::post('/user/privilege/delete', 'UserController@delete_access');
// ADD USER PRIVILEGE
Route::post('/user/access/get', 'UserController@get_user_access');
Route::post('/user/access/add', 'UserController@add_user_access');
Route::post('/user/access/remove', 'UserController@remove_user_access');
Route::post('/user/change_password', 'UserController@changePassword');
Route::get('/mail', 'MailController@test_mail');
Route::get('/home', 'HomeController@index')->name('home');
// ADD USER PRIVILEGE
Route::post('/project/get', 'ProjectManagementController@get');
Route::post('/project/create', 'ProjectManagementController@create');
Route::post('/project/update', 'ProjectManagementController@update');
Route::post('/project/delete', 'ProjectManagementController@delete');
Route::post('/project/find', 'ProjectManagementController@find');
Route::post('/project/monitor/update', 'ProjectManagementController@updateMonitor');
Route::post('/project/monitoring_team', 'ProjectManagementController@getMonitoringList');
Route::post('/project/implementor_agency_lists/get', 'ProjectManagementController@getImplementorAgencyList');
Route::post('/project/update/link', 'ProjectManagementController@updateLink');
Route::post('/project/report/print', 'ProjectManagementController@print_report');
Route::post('/project/upload/attached_file', 'ProjectManagementController@uploadAttachedFiles');
Route::post('/project/get/attached_file', 'ProjectManagementController@getAttachedFile');
Route::post('/project/upload/remove_file', 'ProjectManagementController@removeFile');
Route::post('/project/get/getFundSourceYear', 'ProjectManagementController@getFundSourceYear');
Route::post('/project/get/getInfo', 'ProjectManagementController@getInfo');
Route::post('/general_information/insert', 'GeneralInformationController@insert');
Route::post('/general_information/update', 'GeneralInformationController@update');
Route::post('/general_information/getYear', 'GeneralInformationController@getYear');
Route::post('/general_information/remove', 'GeneralInformationController@remove');
// MUNICIPALITY
Route::post('/land_area_municipality/get', 'LandAndOtherNaturalResourcesController@getLandAreaByMunicipality');
Route::post('/land_area_municipality/update', 'LandAndOtherNaturalResourcesController@updateLandAreaMuniticipalty');
// Land Classification statistics
Route::post('/land_classification_statistics/get', 'LandAndOtherNaturalResourcesController@getLandClassificationStatistics');
Route::post('/land_classification_statistics/update', 'LandAndOtherNaturalResourcesController@updateLandClassificationStatistics');
Route::post('/geographical_zone_surface/get', 'LandAndOtherNaturalResourcesController@getGeographicalZoneSurfaces');
Route::post('/geographical_zone_surface/update', 'LandAndOtherNaturalResourcesController@updateGeographicalZoneSurfaces');
Route::post('/non_metallic_minerals/get', 'LandAndOtherNaturalResourcesController@getNonMetallicMineral');
Route::post('/non_metallic_minerals/update', 'LandAndOtherNaturalResourcesController@updateNonMetallicMineral');
Route::post('/major_watershed/get', 'LandAndOtherNaturalResourcesController@getMajorWatershed');
Route::post('/major_watershed/create', 'LandAndOtherNaturalResourcesController@createMajorWatershed');
Route::post('/major_watershed/update', 'LandAndOtherNaturalResourcesController@updateMajorWatershed');
Route::post('/major_watershed/delete', 'LandAndOtherNaturalResourcesController@deleteMajorWatershed');
Route::post('/coastal_resources/get', 'LandAndOtherNaturalResourcesController@getCoastalResource');
Route::post('/coastal_resources/update', 'LandAndOtherNaturalResourcesController@updateCoastalResource');
Route::post('/established_marine_protected_areas/get', 'LandAndOtherNaturalResourcesController@getEstablishedMarineProtectedArea');
Route::post('/established_marine_protected_areas/create', 'LandAndOtherNaturalResourcesController@createEstablishedMarineProtectedArea');
Route::post('/established_marine_protected_areas/update', 'LandAndOtherNaturalResourcesController@updateEstablishedMarineProtectedArea');
Route::post('/established_marine_protected_areas/delete', 'LandAndOtherNaturalResourcesController@deleteEstablishedMarineProtectedArea');
Route::post('/land_and_natural_resources/forest_covers/get', 'LandAndOtherNaturalResourcesController@getForestCover');
Route::post('/land_and_natural_resources/forest_covers/update', 'LandAndOtherNaturalResourcesController@updateForestCover');
Route::post('/land_and_natural_resources/metallic_mineral_resources/get', 'LandAndOtherNaturalResourcesController@getMetallicMineralResource');
Route::post('/land_and_natural_resources/metallic_mineral_resources/create', 'LandAndOtherNaturalResourcesController@createMetallicMineralResource');
Route::post('/land_and_natural_resources/metallic_mineral_resources/update', 'LandAndOtherNaturalResourcesController@updateMetallicMineralResource');
Route::post('/land_and_natural_resources/metallic_mineral_resources/delete', 'LandAndOtherNaturalResourcesController@deleteMetallicMineralResource');
Route::post('/municipality/watershed', 'MunicipalityController@watershed');
Route::post('/municipality/get', 'MunicipalityController@get');
Route::post('/municipality/getMunicipalityWithPSGC', 'MunicipalityController@getMunicipalityWithPSGC');
Route::post('/municipality/barangay', 'MunicipalityController@getBarangay');
Route::post('/municipality/getBarangayWithPSGC', 'MunicipalityController@getBarangayWithPSGC');
Route::post('/personnel/office/get', 'InstitutionalProfileController@getPersonnelByOffice');
Route::post('/personnel/office/update', 'InstitutionalProfileController@updatePersonnelByOffice');
Route::post('/personnel/status/get', 'InstitutionalProfileController@getPersonnelByStatus');
Route::post('/personnel/status/update', 'InstitutionalProfileController@updatePersonnelByStatus');
Route::post('/financial_profile/annual_budget/get', 'FinancialProfileController@getAnnualIncomeBudget');
Route::post('/financial_profile/annual_budget/update', 'FinancialProfileController@updateAnnualIncomeBudget');
Route::post('/financial_profile/income_and_expenditure/get', 'FinancialProfileController@getIncomeAndExpenditure');
Route::post('/financial_profile/income_and_expenditure/update', 'FinancialProfileController@updateIncomeAndExpenditure');
Route::post('/infrustructure_and_utilities/satellite_television_networks/get', 'InfrastructureUtilitiesController@getSatelliteTelevisionNetwork');
Route::post('/infrustructure_and_utilities/satellite_television_networks/create', 'InfrastructureUtilitiesController@createSatelliteTelevisionNetwork');
Route::post('/infrustructure_and_utilities/satellite_television_networks/update', 'InfrastructureUtilitiesController@updateSatelliteTelevisionNetwork');
Route::post('/infrustructure_and_utilities/satellite_television_networks/delete', 'InfrastructureUtilitiesController@deleteSatelliteTelevisionNetwork');
Route::post('/infrustructure_and_utilities/radio_stations/get', 'InfrastructureUtilitiesController@getRadioStation');
Route::post('/infrustructure_and_utilities/radio_stations/create', 'InfrastructureUtilitiesController@createRadioStation');
Route::post('/infrustructure_and_utilities/radio_stations/update', 'InfrastructureUtilitiesController@updateRadioStation');
Route::post('/infrustructure_and_utilities/radio_stations/delete', 'InfrastructureUtilitiesController@deleteRadioStation');
Route::post('/infrustructure_and_utilities/cable_television_networks/get', 'InfrastructureUtilitiesController@getCableTelevisionNetwork');
Route::post('/infrustructure_and_utilities/cable_television_networks/create', 'InfrastructureUtilitiesController@createCableTelevisionNetwork');
Route::post('/infrustructure_and_utilities/cable_television_networks/update', 'InfrastructureUtilitiesController@updateCableTelevisionNetwork');
Route::post('/infrustructure_and_utilities/cable_television_networks/delete', 'InfrastructureUtilitiesController@deleteCableTelevisionNetwork');
Route::post('/infrustructure_and_utilities/registered_vechicle/get', 'InfrastructureUtilitiesController@getRegisteredVehicle');
Route::post('/infrustructure_and_utilities/registered_vechicle/update', 'InfrastructureUtilitiesController@updateRegisteredVehicle');
Route::post('/infrustructure_and_utilities/communication/get', 'InfrastructureUtilitiesController@getCommunication');
Route::post('/infrustructure_and_utilities/communication/update', 'InfrastructureUtilitiesController@updateCommunication');
Route::post('/infrustructure_and_utilities/status_of_power/get', 'InfrastructureUtilitiesController@getStatusOfPower');
Route::post('/infrustructure_and_utilities/status_of_power/update', 'InfrastructureUtilitiesController@updateStatusOfPower');
Route::post('/infrustructure_and_utilities/energized_and_unenergized_barangay_sitios/get', 'InfrastructureUtilitiesController@getEnergizedUnergizedBarangay');
Route::post('/infrustructure_and_utilities/energized_and_unenergized_barangay_sitios/update', 'InfrastructureUtilitiesController@updateEnergizedUnergizedBarangay');
Route::post('/infrustructure_and_utilities/water_system/get', 'InfrastructureUtilitiesController@getWaterSystem');
Route::post('/infrustructure_and_utilities/water_system/create', 'InfrastructureUtilitiesController@createWaterSystem');
Route::post('/infrustructure_and_utilities/water_system/update', 'InfrastructureUtilitiesController@updateWaterSystem');
Route::post('/infrustructure_and_utilities/household_population_by_source_of_drinking_waters/get', 'InfrastructureUtilitiesController@getHouseholdPopulationBySourceOfDrinkingWater');
Route::post('/infrustructure_and_utilities/household_population_by_source_of_drinking_waters/create', 'InfrastructureUtilitiesController@createHouseholdPopulationBySourceOfDrinkingWater');
Route::post('/infrustructure_and_utilities/household_population_by_source_of_drinking_waters/update', 'InfrastructureUtilitiesController@updateHouseholdPopulationBySourceOfDrinkingWater');
Route::post('/infrustructure_and_utilities/household_population_by_source_of_drinking_waters/delete', 'InfrastructureUtilitiesController@deleteHouseholdPopulationBySourceOfDrinkingWater');
Route::post('/infrustructure_and_utilities/present_source_of_powers/get', 'InfrastructureUtilitiesController@getPresentSourceOfPower');
Route::post('/infrustructure_and_utilities/present_source_of_powers/create', 'InfrastructureUtilitiesController@createPresentSourceOfPower');
Route::post('/infrustructure_and_utilities/present_source_of_powers/update', 'InfrastructureUtilitiesController@updatePresentSourceOfPower');
Route::post('/infrustructure_and_utilities/present_source_of_powers/delete', 'InfrastructureUtilitiesController@deletePresentSourceOfPower');
Route::post('/infrustructure_and_utilities/power_substations/get', 'InfrastructureUtilitiesController@getPowerSubstation');
Route::post('/infrustructure_and_utilities/power_substations/create', 'InfrastructureUtilitiesController@createPowerSubstation');
Route::post('/infrustructure_and_utilities/power_substations/update', 'InfrastructureUtilitiesController@updatePowerSubstation');
Route::post('/infrustructure_and_utilities/power_substations/delete', 'InfrastructureUtilitiesController@deletePowerSubstation');
Route::post('/infrustructure_and_utilities/license_permit_issueds/get', 'InfrastructureUtilitiesController@getLicensePermitIssued');
Route::post('/infrustructure_and_utilities/license_permit_issueds/update', 'InfrastructureUtilitiesController@updateLicensePermitIssued');
// DEMOGRAPH endpoints.
Route::post('/demograph/ancestral_domain/get', 'DemographController@getAncestralDomain');
Route::post('/demograph/ancestral_domain/create', 'DemographController@createAncestralDomain');
Route::post('/demograph/ancestral_domain/update', 'DemographController@updateAncestralDomain');
Route::post('/demograph/ancestral_domain/delete', 'DemographController@deleteAncestralDomain');
Route::post('/demograph/population_by_census_year/create', 'DemographController@createPopulationByCensusYear');
Route::post('/demograph/population_by_census_year/get', 'DemographController@getPopulationByCensusYear');
// Legacy misspelled path kept so existing front-end callers do not break.
Route::post('/demograph/population_by_census_year/udate', 'DemographController@updatePopulationByCensusYear');
// FIX: correctly-spelled alias for the same action ("udate" was a typo).
Route::post('/demograph/population_by_census_year/update', 'DemographController@updatePopulationByCensusYear');
Route::post('/demograph/household_population_by_sexes/create', 'DemographController@createHouseholdPopulationBySex');
Route::post('/demograph/household_population_by_sexes/get', 'DemographController@getHouseholdPopulationBySex');
Route::post('/demograph/household_population_by_sexes/update', 'DemographController@updateHouseholdPopulationBySex');
Route::post('/demograph/mangyan_tribes/get', 'DemographController@getMangyanTribe');
Route::post('/demograph/mangyan_tribes/create', 'DemographController@createMangyanTribe');
Route::post('/demograph/mangyan_tribes/update', 'DemographController@updateMangyanTribe');
Route::post('/demograph/mangyan_tribes/delete', 'DemographController@deleteMangyanTribe');
Route::post('/demograph/actual_projecteds/get', 'DemographController@getActualProjected');
Route::post('/demograph/actual_projecteds/getYear', 'DemographController@getYearActualProjected');
Route::post('/demograph/actual_projecteds/create', 'DemographController@createActualProjected');
Route::post('/demograph/actual_projecteds/update', 'DemographController@updateActualProjected');
Route::post('/demograph/population_densities/get', 'DemographController@getPopulationDensity');
Route::post('/demograph/population_densities/getYear', 'DemographController@getYearPopulationDensity');
Route::post('/demograph/population_densities/create', 'DemographController@createPopulationDensity');
Route::post('/demograph/population_densities/update', 'DemographController@updatePopulationDensity');
Route::post('/demograph/projected_populations/get', 'DemographController@getProjectedPopulation');
Route::post('/demograph/projected_populations/create', 'DemographController@createProjectedPopulation');
Route::post('/demograph/projected_populations/update', 'DemographController@updateProjectedPopulation');
Route::post('/demograph/urban_rural_populations/get', 'DemographController@getUrbanRuralPopulation');
Route::post('/demograph/urban_rural_populations/getYear', 'DemographController@getYearUrbanRuralPopulation');
Route::post('/demograph/urban_rural_populations/create', 'DemographController@createUrbanRuralPopulation');
Route::post('/demograph/urban_rural_populations/update', 'DemographController@updateUrbanRuralPopulation');
// ECONOMIC PROFILE endpoints.
Route::prefix('economic_profile')->group(function () {
    // Agricultural production tables — read and update only.
    Route::post('/rice_area_by_city/get', 'EconomicProfileController@getRiceAreaCity');
    Route::post('/rice_area_by_city/update', 'EconomicProfileController@updateRiceAreaCity');
    Route::post('/irrigated_area/get', 'EconomicProfileController@getIrrigatedArea');
    Route::post('/irrigated_area/update', 'EconomicProfileController@updateIrrigatedArea');
    Route::post('/rice_production/get', 'EconomicProfileController@getRiceProduction');
    Route::post('/rice_production/update', 'EconomicProfileController@updateRiceProduction');
    Route::post('/corn_production/get', 'EconomicProfileController@getCornProduction');
    Route::post('/corn_production/update', 'EconomicProfileController@updateCornProduction');
    Route::post('/vegetable_production/get', 'EconomicProfileController@getVegetableProduction');
    Route::post('/vegetable_production/update', 'EconomicProfileController@updateVegetableProduction');
    Route::post('/coconut_production/get', 'EconomicProfileController@getCoconutProduction');
    Route::post('/coconut_production/update', 'EconomicProfileController@updateCoconutProduction');
    // Tourism / finance infrastructure.
    Route::post('/accommodation_establishments/get', 'EconomicProfileController@getAccommodationEstablishment');
    Route::post('/accommodation_establishments/update', 'EconomicProfileController@updateAccommodationEstablishment');
    Route::post('/banks_and_atms/get', 'EconomicProfileController@getBanksAndAtm');
    Route::post('/banks_and_atms/update', 'EconomicProfileController@updateBanksAndAtm');
    Route::post('/high_value_commercial_crops/get', 'EconomicProfileController@getHighValueCommercialCrop');
    Route::post('/high_value_commercial_crops/create', 'EconomicProfileController@createHighValueCommercialCrop');
    Route::post('/high_value_commercial_crops/update', 'EconomicProfileController@updateHighValueCommercialCrop');
    Route::post('/high_value_commercial_crops/delete', 'EconomicProfileController@deleteHighValueCommercialCrop');
    Route::post('/tourist_sites_and_destinations/get', 'EconomicProfileController@getTouristSitesAndDestination');
    Route::post('/tourist_sites_and_destinations/create', 'EconomicProfileController@createTouristSitesAndDestination');
    Route::post('/tourist_sites_and_destinations/update', 'EconomicProfileController@updateTouristSitesAndDestination');
    Route::post('/tourist_sites_and_destinations/delete', 'EconomicProfileController@deleteTouristSitesAndDestination');
    // COMMERCE AND INDUSTRY.
    Route::post('/commerce_and_industry/get', 'EconomicProfileController@getCommerceAndIndustry');
    Route::post('/commerce_and_industry/update', 'EconomicProfileController@updateCommerceAndIndustry');
    Route::post('/top_five_products/get', 'EconomicProfileController@getTopFiveProduct');
    Route::post('/top_five_products/create', 'EconomicProfileController@createTopFiveProduct');
    Route::post('/top_five_products/update', 'EconomicProfileController@updateTopFiveProduct');
    Route::post('/top_five_products/delete', 'EconomicProfileController@deleteTopFiveProduct');
    Route::post('/top_five_fruit_areas/get', 'EconomicProfileController@getTopFiveFruitArea');
    Route::post('/top_five_fruit_areas/create', 'EconomicProfileController@createTopFiveFruitArea');
    Route::post('/top_five_fruit_areas/update', 'EconomicProfileController@updateTopFiveFruitArea');
    Route::post('/top_five_fruit_areas/delete', 'EconomicProfileController@deleteTopFiveFruitArea');
});
// VUE ROUTE CATCHER — must remain the last registered route so it does not
// shadow the API routes above. (FIX: removed a stray second semicolon.)
Route::get('/{vue_capture?}', function () { return view('welcome'); })->where('vue_capture', '[\/\w\.-]*')->middleware('auth');
<file_sep>/database/migrations/2018_12_06_084043_create_projected_population_infos_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateProjectedPopulationInfosTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Header table for one projected-population dataset: the two actual census
     * years being compared, the growth rate used, and the three projection years.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('projected_population_infos', function (Blueprint $table) {
            $table->increments('id');
            $table->year('actual_year_1');
            $table->year('actual_year_2');
            // Stored as a string, unlike the per-province decimal in
            // projected_populations — presumably to allow formatted values; TODO confirm.
            $table->string('growth_rate');
            $table->year('projection_year_1');
            $table->year('projection_year_2');
            $table->year('projection_year_3');
            // Lookup key used by DemographController@getProjectedPopulation.
            $table->year('year_of_encoding');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('projected_population_infos');
    }
}
<file_sep>/app/PopulationDensityInfo.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Header record for a population-density dataset: the previous comparison
 * year, the actual census year, and the estimation year label.
 */
class PopulationDensityInfo extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['old_year', 'actual_year', 'estimated'];
}
<file_sep>/app/Http/Controllers/DemographController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\AncestralDomain;
use App\PopulationByCensusYear;
use App\Municipality;
use App\HouseholdPopulationBySex;
use App\MangyanTribe;
use App\ActualProjected;
use App\ActualProjectedInfo;
use App\PopulationDensity;
use App\PopulationDensityInfo;
use App\ProjectedPopulationInfo;
use App\ProjectedPopulation;
use App\UrbanRuralPopulation;
use App\UrbanRuralPopulationInfo;
/**
 * CRUD endpoints for the provincial demographic datasets: urban/rural split,
 * projected population, population density, census-year counts, household
 * population by sex, ancestral domains and Mangyan tribes.
 *
 * Conventions: get* endpoints return data shaped for the UI; create* seed a
 * dataset (usually one zeroed row per municipality); update* bulk-apply edits
 * and return the string 'success'.
 */
class DemographController extends Controller
{
    /** Return every urban/rural dataset header (used by the UI as a year picker). */
    public function getYearUrbanRuralPopulation()
    {
        return UrbanRuralPopulationInfo::get();
    }

    /** Return the urban/rural dataset (header + per-municipality rows) for a year of encoding. */
    public function getUrbanRuralPopulation(Request $request)
    {
        $urban_rural_population_infos = UrbanRuralPopulationInfo::where('year_of_encoding', $request->input('actual_year'))->first();
        if ($urban_rural_population_infos) {
            $urban_rural_populations = UrbanRuralPopulation::where('urban_rural_population_infos_id', $urban_rural_population_infos->id)
                ->join('municipalities', 'municipalities.id', '=', 'urban_rural_populations.municipality')
                ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'urban_rural_populations.*', 'municipalities.district')
                ->get();
        } else {
            // Nothing encoded for that year yet: return empty sets so the UI can offer creation.
            $urban_rural_populations = [];
            $urban_rural_population_infos = [];
        }
        return [
            'urban_rural_populations' => $urban_rural_populations,
            'urban_rural_population_infos' => $urban_rural_population_infos,
        ];
    }

    /** Create an urban/rural dataset header plus one zeroed row per municipality. */
    public function createUrbanRuralPopulation(Request $request)
    {
        $create = UrbanRuralPopulationInfo::create([
            'population_year_1' => $request->input('population_year_1'),
            'population_year_2' => $request->input('population_year_2'),
            'year_of_encoding' => $request->input('year_of_encoding'),
        ]);
        foreach (Municipality::get() as $value) {
            $info = UrbanRuralPopulation::create([
                'urban_rural_population_infos_id' => $create->id,
                'municipality' => $value->id,
                'no_of_barangays_urban' => 0,
                'no_of_barangays_rural' => 0,
                'population_urban_year_1' => 0,
                'population_rural_year_1' => 0,
                'population_urban_year_2' => 0,
                'population_rural_year_2' => 0,
            ]);
        }
        return 'success';
    }

    /** Bulk-update urban/rural rows; each element of `data` carries its own row id. */
    public function updateUrbanRuralPopulation(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = UrbanRuralPopulation::where('id', $value["id"])
                ->update([
                    'no_of_barangays_urban' => $value["no_of_barangays_urban"],
                    'no_of_barangays_rural' => $value["no_of_barangays_rural"],
                    'population_urban_year_1' => $value["population_urban_year_1"],
                    'population_rural_year_1' => $value["population_rural_year_1"],
                    'population_urban_year_2' => $value["population_urban_year_2"],
                    'population_rural_year_2' => $value["population_rural_year_2"],
                ]);
        }
        return 'success';
    }

    /** Return the projected-population dataset (header + per-province rows) for a year of encoding. */
    public function getProjectedPopulation(Request $request)
    {
        $projected_population_infos = ProjectedPopulationInfo::where('year_of_encoding', $request->input('actual_year'))->first();
        if ($projected_population_infos) {
            $projected_populations = ProjectedPopulation::where('projected_population_infos_id', $projected_population_infos->id)->get();
        } else {
            $projected_population_infos = [];
            $projected_populations = [];
        }
        return [
            'projected_population_infos' => $projected_population_infos,
            'projected_populations' => $projected_populations,
        ];
    }

    /** Create a projected-population header plus one zeroed row per MIMAROPA province. */
    public function createProjectedPopulation(Request $request)
    {
        $create = ProjectedPopulationInfo::create([
            'actual_year_1' => $request->input('actual_year_1'),
            'actual_year_2' => $request->input('actual_year_2'),
            'growth_rate' => $request->input('growth_rate'),
            'projection_year_1' => $request->input('projection_year_1'),
            'projection_year_2' => $request->input('projection_year_2'),
            'projection_year_3' => $request->input('projection_year_3'),
            'year_of_encoding' => $request->input('year_of_encoding'),
        ]);
        // Fixed province list this dataset is always broken down by.
        $arr = ["Occidental Mindoro", "Oriental Mindoro", "Marinduque", "Romblon", "Palawan", "Puerto Princesa City"];
        if ($create) {
            foreach ($arr as $value) {
                $details = ProjectedPopulation::create([
                    'projected_population_infos_id' => $create->id,
                    'province' => $value,
                    'actual_year_1' => 0,
                    'actual_year_2' => 0,
                    'growth_rate' => 0,
                    'projection_year_1' => 0,
                    'projection_year_2' => 0,
                    'projection_year_3' => 0,
                ]);
            }
        }
        return 'success';
    }

    /** Bulk-update projected-population rows; each element of `data` carries its own row id. */
    public function updateProjectedPopulation(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = ProjectedPopulation::where('id', $value["id"])->update([
                'actual_year_1' => $value["actual_year_1"],
                'actual_year_2' => $value["actual_year_2"],
                'growth_rate' => $value["growth_rate"],
                'projection_year_1' => $value["projection_year_1"],
                'projection_year_2' => $value["projection_year_2"],
                'projection_year_3' => $value["projection_year_3"],
            ]);
        }
        return 'success';
    }

    /** Return every population-density dataset header (used by the UI as a year picker). */
    public function getYearPopulationDensity(Request $request)
    {
        return PopulationDensityInfo::get();
    }

    /** Return the population-density dataset (header + per-municipality rows) for an actual year. */
    public function getPopulationDensity(Request $request)
    {
        $population_density_infos = PopulationDensityInfo::where('actual_year', $request->input('actual_year'))->first();
        if ($population_density_infos) {
            $population_densities = PopulationDensity::where('population_density_infos_id', $population_density_infos->id)
                ->join('municipalities', 'municipalities.id', '=', 'population_densities.municipality')
                ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'population_densities.*', 'municipalities.district')
                ->get();
        } else {
            $population_densities = [];
            $population_density_infos = [];
        }
        return [
            'population_densities' => $population_densities,
            'population_density_infos' => $population_density_infos,
        ];
    }

    /** Create a population-density header plus one zeroed row per municipality. */
    public function createPopulationDensity(Request $request)
    {
        $create = PopulationDensityInfo::create([
            'old_year' => $request->input('old_year'),
            'actual_year' => $request->input('actual_year'),
            'estimated' => $request->input('estimated'),
        ]);
        foreach (Municipality::get() as $value) {
            $info = PopulationDensity::create([
                'population_density_infos_id' => $create->id,
                'municipality' => $value->id,
                'gross_old' => 0,
                'gross_actual' => 0,
                'urban_old' => 0,
                'urban_actual' => 0,
                'estimated_gross' => 0,
            ]);
        }
        return 'success';
    }

    /** Bulk-update population-density rows; each element of `data` carries its own row id. */
    public function updatePopulationDensity(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = PopulationDensity::where('id', $value["id"])
                ->update([
                    'gross_old' => $value["gross_old"],
                    'gross_actual' => $value["gross_actual"],
                    'urban_old' => $value["urban_old"],
                    'urban_actual' => $value["urban_actual"],
                    'estimated_gross' => $value["estimated_gross"],
                ]);
        }
        return 'success';
    }

    /** Return the actual/projected dataset (header + per-municipality rows) for an actual year. */
    public function getActualProjected(Request $request)
    {
        $actual_projected_infos = ActualProjectedInfo::where('actual_year', $request->input('actual_year'))->first();
        if ($actual_projected_infos) {
            $actual_projected = ActualProjected::where('actual_projected_infos_id', $actual_projected_infos->id)
                ->join('municipalities', 'municipalities.id', '=', 'actual_projecteds.municipality')
                ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'actual_projecteds.*', 'municipalities.district')
                ->get();
        } else {
            $actual_projected = [];
            $actual_projected_infos = [];
        }
        return [
            'actual_projected_infos' => $actual_projected_infos,
            'actual_projected' => $actual_projected,
        ];
    }

    /** Create an actual/projected header plus one zeroed row per municipality. */
    public function createActualProjected(Request $request)
    {
        $create_info = ActualProjectedInfo::create([
            'old_year' => $request->input('old_year'),
            'actual_year' => $request->input('actual_year'),
            'projected_year' => $request->input('projected_year'),
            'growth_rate_1' => $request->input('growth_rate_1'),
            'growth_rate_2' => $request->input('growth_rate_2'),
        ]);
        foreach (Municipality::get() as $value) {
            $create = ActualProjected::create([
                'actual_projected_infos_id' => $create_info->id,
                'municipality' => $value->id,
                'old_year_pop' => 0,
                'old_year_hh' => 0,
                'actual_year_pop' => 0,
                'actual_year_hh' => 0,
                'projected_year_pop' => 0,
                'projected_year_hh' => 0,
                'growth_rate_1' => 0,
                'growth_rate_2' => 0,
            ]);
        }
        return 'success';
    }

    /** Return every actual/projected dataset header (used by the UI as a year picker). */
    public function getYearActualProjected(Request $request)
    {
        $actual_year = ActualProjectedInfo::get();
        return $actual_year;
    }

    /** Bulk-update actual/projected rows; each element of `data` carries its own row id. */
    public function updateActualProjected(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = ActualProjected::where('id', $value["id"])
                ->update([
                    'old_year_pop' => $value["old_year_pop"],
                    'old_year_hh' => $value["old_year_hh"],
                    'actual_year_pop' => $value["actual_year_pop"],
                    'actual_year_hh' => $value["actual_year_hh"],
                    'projected_year_pop' => $value["projected_year_pop"],
                    'projected_year_hh' => $value["projected_year_hh"],
                    'growth_rate_1' => $value["growth_rate_1"],
                    'growth_rate_2' => $value["growth_rate_2"],
                ]);
        }
        return 'success';
    }

    /** Return ancestral domains for the requested year (defaults to the current calendar year). */
    public function getAncestralDomain(Request $request)
    {
        // BUGFIX: was date('o') — the ISO-8601 *week-numbering* year, which differs
        // from the calendar year around New Year's Day. date('Y') is the intended default.
        $year = date('Y');
        if ($request->has('year')) {
            $year = $request->input('year');
        }
        return AncestralDomain::where('year', $year)->get();
    }

    /** Create one ancestral-domain record from the request payload. */
    public function createAncestralDomain(Request $request)
    {
        $create = AncestralDomain::create([
            'name_address' => $request->input('name_address'),
            'ethnolinguistic' => $request->input('ethnolinguistic'),
            'estimated_area' => $request->input('estimated_area'),
            'year' => $request->input('year'),
        ]);
        return 'success';
    }

    /** Update one ancestral-domain record identified by `id`. */
    public function updateAncestralDomain(Request $request)
    {
        $update = AncestralDomain::where('id', $request->input('id'))
            ->update([
                'name_address' => $request->input('name_address'),
                'ethnolinguistic' => $request->input('ethnolinguistic'),
                'estimated_area' => $request->input('estimated_area'),
                'year' => $request->input('year'),
            ]);
        return 'success';
    }

    /** Delete one ancestral-domain record identified by `id`. */
    public function deleteAncestralDomain(Request $request)
    {
        $delete = AncestralDomain::where('id', $request->input('id'))->delete();
        return 'success';
    }

    /** Return census populations grouped per municipality, plus the distinct list of years. */
    public function getPopulationByCensusYear()
    {
        $info = PopulationByCensusYear::join('municipalities', 'municipalities.id', '=', 'population_by_census_years.municipality')
            ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'population_by_census_years.*', 'municipalities.district')
            ->get();
        $arr["data"] = [];
        $arr["year"] = [];
        // groupBy('year') is used only to obtain the distinct year values.
        $years = PopulationByCensusYear::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->municipality_name][] = [
                'year' => $value->year,
                'id' => $value->id,
                'population' => $value->population,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }

    /** Seed a zeroed census row per municipality for the given year (idempotent). */
    public function createPopulationByCensusYear(Request $request)
    {
        foreach (Municipality::get() as $value) {
            $check = PopulationByCensusYear::where([
                'year' => $request->input('year'),
                'municipality' => $value->id,
            ])->first();
            if (!$check) {
                $create = PopulationByCensusYear::create([
                    'municipality' => $value->id,
                    'population' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        // NOTE(review): returns the string 'true' while most endpoints return
        // 'success'; kept as-is for front-end compatibility.
        return 'true';
    }

    /** Bulk-update census populations; `data` is nested per municipality then per year row. */
    public function updatePopulationByCensusYear(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = PopulationByCensusYear::where('id', $info["id"])
                    ->update(['population' => $info["population"]]);
            }
        }
        return 'success';
    }

    /** Return household populations by sex grouped per municipality, plus the distinct years. */
    public function getHouseholdPopulationBySex()
    {
        $info = HouseholdPopulationBySex::join('municipalities', 'municipalities.id', '=', 'household_population_by_sexes.municipality')
            ->select('municipalities.id as municipality', 'municipalities.municipality as municipality_name', 'household_population_by_sexes.*', 'municipalities.district')
            ->get();
        $arr["data"] = [];
        $arr["year"] = [];
        // groupBy('year') is used only to obtain the distinct year values.
        $years = HouseholdPopulationBySex::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->municipality_name][] = [
                'year' => $value->year,
                'id' => $value->id,
                'female' => $value->female,
                'male' => $value->male,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }

    /** Seed a zeroed by-sex row per municipality for the given year (idempotent). */
    public function createHouseholdPopulationBySex(Request $request)
    {
        foreach (Municipality::get() as $value) {
            $check = HouseholdPopulationBySex::where([
                'year' => $request->input('year'),
                'municipality' => $value->id,
            ])->first();
            if (!$check) {
                $create = HouseholdPopulationBySex::create([
                    'municipality' => $value->id,
                    'female' => 0,
                    'male' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        // NOTE(review): returns 'true' (not 'success') — kept for front-end compatibility.
        return 'true';
    }

    /** Bulk-update by-sex counts; `data` is nested per municipality then per year row. */
    public function updateHouseholdPopulationBySex(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = HouseholdPopulationBySex::where('id', $info["id"])
                    ->update([
                        'female' => $info["female"],
                        'male' => $info["male"]
                    ]);
            }
        }
        return 'success';
    }

    /** Return every Mangyan tribe record. */
    public function getMangyanTribe(Request $request)
    {
        return MangyanTribe::get();
    }

    /** Create one Mangyan tribe record from the request payload. */
    public function createMangyanTribe(Request $request)
    {
        $create = MangyanTribe::create([
            'mangyan_tribe' => $request->input('mangyan_tribe'),
            'description' => $request->input('description'),
        ]);
        return 'success';
    }

    /** Update one Mangyan tribe record identified by `id`. */
    public function updateMangyanTribe(Request $request)
    {
        $update = MangyanTribe::where('id', $request->input('id'))->update([
            'mangyan_tribe' => $request->input('mangyan_tribe'),
            'description' => $request->input('description'),
        ]);
        return 'success';
    }

    /** Delete one Mangyan tribe record identified by `id`. */
    public function deleteMangyanTribe(Request $request)
    {
        $delete = MangyanTribe::where('id', $request->input('id'))->delete();
        return 'success';
    }
}
<file_sep>/app/Http/Controllers/ExcelController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Excel;
use App\Imports\CbmsImport;
use App\Imports\AddressImport;
use App\Http\Controllers\FileController;
use Storage;
/**
 * Spreadsheet import endpoints (Maatwebsite/Excel).
 */
class ExcelController extends Controller
{
    /**
     * Import a CBMS workbook posted as a base64 data URI.
     *
     * The payload in `cbms` is decoded and stored under /cbms via
     * FileController::uf_base64, then fed to CbmsImport tagged with `year`.
     */
    public function uploadCBMS(Request $request)
    {
        $file = FileController::uf_base64($request->input('cbms'), '/cbms');
        $excel = Excel::import(new CbmsImport($request->input('year')), $file, 'public', \Maatwebsite\Excel\Excel::XLSX);
        // FIX: return a status string for consistency with addBarangay()
        // (previously this endpoint returned nothing).
        return 'success';
    }

    /**
     * Seed address records from the bundled city_mun.xlsx sheet on the public disk.
     */
    public function addBarangay(Request $request)
    {
        $excel = Excel::import(new AddressImport(), '/city_mun.xlsx', 'public', \Maatwebsite\Excel\Excel::XLSX);
        return 'success';
    }
}
<file_sep>/app/MetallicMineralResource.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * One metallic mineral occurrence record: the mineral, the municipality it is
 * found in, and the reporting year.
 */
class MetallicMineralResource extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['mineral', 'municipality', 'year'];
}
<file_sep>/database/migrations/2018_12_06_083957_create_projected_populations_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateProjectedPopulationsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Per-province detail rows of a projected-population dataset, linked to
     * projected_population_infos via projected_population_infos_id.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('projected_populations', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('projected_population_infos_id');
            $table->string('province');
            $table->integer('actual_year_1')->default(0);
            $table->integer('actual_year_2')->default(0);
            // FIX: was decimal('growth_rate', 2, 2) — precision 2 / scale 2 can only
            // store values strictly below 1.00, so any growth rate >= 1% overflowed.
            // decimal(8, 2) keeps two decimal places with a sane integer range.
            $table->decimal('growth_rate', 8, 2)->default(0);
            $table->integer('projection_year_1')->default(0);
            $table->integer('projection_year_2')->default(0);
            $table->integer('projection_year_3')->default(0);
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('projected_populations');
    }
}
<file_sep>/database/migrations/2018_10_17_014536_create_registered_vehicles_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateRegisteredVehiclesTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Monthly counts of registered vehicles, one column per vehicle class,
     * keyed by (month, year).
     *
     * @return void
     */
    public function up()
    {
        Schema::create('registered_vehicles', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('cars')->default(0);
            $table->integer('suv_uv')->default(0);
            $table->integer('tricycle')->default(0);
            $table->integer('truck')->default(0);
            $table->integer('truck_bus')->default(0);
            $table->integer('motorcycle')->default(0);
            $table->integer('uv_jeep')->default(0);
            // Plain integer; presumably 1-12 — not enforced at the schema level.
            $table->integer('month');
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('registered_vehicles');
    }
}
<file_sep>/database/migrations/2018_10_02_080517_create_land_classification_statistics_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateLandClassificationStatisticsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * One record per year of land-classification figures (hectare areas,
     * two decimal places each).
     *
     * @return void
     */
    public function up()
    {
        Schema::create('land_classification_statistics', function (Blueprint $table) {
            $table->increments('id');
            $table->decimal('certified_alienable_and_disposable_land', 20, 2);
            $table->decimal('forest_land_unclassified', 20, 2);
            $table->decimal('forest_land_classified', 20, 2);
            // Lookup key used by LandAndOtherNaturalResourcesController.
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('land_classification_statistics');
    }
}
<file_sep>/app/PersonnelByStatus.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly personnel head counts broken down by employment status
 * (permanent / co-terminous / elective), level (first / second) and sex.
 */
class PersonnelByStatus extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = [
        // Permanent positions, first and second level.
        'permanent_first_male', 'permanent_first_female',
        'permanent_second_male', 'permanent_second_female',
        // Co-terminous positions, first and second level.
        'co_terminous_first_male', 'co_terminous_first_female',
        'co_terminous_second_male', 'co_terminous_second_female',
        // Elective positions.
        'elective_male', 'elective_female',
        'year',
    ];
}
<file_sep>/app/GeographicalZoneSurfaces.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly geographical surface figures: coastal area, lake areas and total
 * land mass. (NOTE(review): class name is plural, unlike Eloquent's singular
 * convention — kept because other code references it by this name.)
 */
class GeographicalZoneSurfaces extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['coastal_area', 'lake_areas', 'total_land_mass', 'year'];
}
<file_sep>/app/NumberRateBdimd.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * A yearly indicator holding both an absolute number and a rate
 * (births/deaths/etc. — the `indicator` column names the statistic).
 */
class NumberRateBdimd extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['indicator', 'number', 'rate', 'year'];
}
<file_sep>/app/Http/Controllers/FileController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Storage;
/**
 * File-handling helpers: multipart upload testing and base64 data-URI storage.
 */
class FileController extends Controller
{
    /**
     * Debug endpoint: store each uploaded file under storage "avatars".
     * No response body is produced.
     */
    public function test_upload(Request $request)
    {
        $data = $request->file('files');
        foreach ($data as $value) {
            $path = Storage::putFile('avatars', $value);
        }
    }

    /**
     * Store a base64 data URI on the public disk and return its relative path.
     *
     * @param string      $base64_file Data URI ("data:<mime>;base64,<payload>") or a plain path.
     * @param string      $path        Directory (relative to the public disk) to store under.
     * @param string|null $filename    Optional base name; a random one is generated when null.
     * @return string Stored relative path, or $base64_file unchanged when it is
     *                not a base64 data URI (allows callers to pass through paths).
     */
    public static function uf_base64($base64_file, $path, $filename = null)
    {
        // Alphabet used to build a random file name.
        $pool = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
        $file = $base64_file;
        // FIX: strrpos() returns false when "base64" is absent; compare against
        // false explicitly instead of relying on `> 0`.
        $check_base64 = strrpos($file, "base64");
        if ($check_base64 !== false) {
            // $explode[0] = raw data-URI header, $explode[1] = base64 payload.
            $explode = explode(",", $file);
            $decoded_file = base64_decode($explode[1]);
            // Timestamp + random suffix keeps generated names unique.
            $date = date('dmyYHis');
            $random_string = substr(str_shuffle(str_repeat($pool, 5)), 0, 10);
            // Derive the extension from the data-URI header's MIME type.
            $file_extension = self::uf_get_base64_file_extension($explode[0]);
            if ($filename == null) {
                $filename = $date . $random_string . '.' . $file_extension;
            } else {
                $filename .= '.' . $file_extension;
            }
            Storage::disk('public')->put($path . "/" . $filename, $decoded_file, 'public');
            $url = $path . "/" . $filename;
            return $url;
        } else {
            // Not a data URI — return the input untouched.
            return $file;
        }
    }

    /**
     * Map a data-URI header (e.g. "data:image/png;base64") to a file extension.
     *
     * @param string $base64_raw_info The part of the data URI before the comma.
     * @return string File extension without the dot; "bin" for unknown MIME types.
     */
    public static function uf_get_base64_file_extension($base64_raw_info)
    {
        // Strip the ";base64" suffix and "data:" prefix to isolate the MIME type.
        $mime = str_replace(';base64', '', $base64_raw_info);
        $mime = str_replace('data:', '', $mime);
        $extension_arr = [
            "audio/aac" => "aac",
            "application/x-abiword" => "abw",
            // FIX: "application/octet-stream" was listed twice ("arc" then "bin");
            // PHP keeps the last duplicate key, so only the single "bin" entry remains.
            "application/octet-stream" => "bin",
            "video/x-msvideo" => "avi",
            "application/vnd.amazon.ebook" => "azw",
            "image/bmp" => "bmp",
            "application/x-bzip" => "bz",
            "application/x-bzip2" => "bz2",
            "application/x-csh" => "csh",
            "text/css" => "css",
            "text/csv" => "csv",
            "application/msword" => "doc",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.document" => "docx",
            "application/vnd.ms-fontobject" => "eot",
            "application/epub+zip" => "epub",
            "application/ecmascript" => "es",
            "image/gif" => "gif",
            "text/html" => "html",
            "image/x-icon" => "ico",
            "text/calendar" => "ics",
            "application/java-archive" => "jar",
            "image/jpeg" => "jpeg",
            "application/javascript" => "js",
            "application/json" => "json",
            // FIX: was the single malformed key "audio/midi audio/x-midi",
            // which could never match a real MIME type.
            "audio/midi" => "midi",
            "audio/x-midi" => "midi",
            "video/mpeg" => "mpeg",
            "application/vnd.apple.installer+xml" => "mpkg",
            "application/vnd.oasis.opendocument.presentation" => "odp",
            "application/vnd.oasis.opendocument.spreadsheet" => "ods",
            "application/vnd.oasis.opendocument.text" => "odt",
            "audio/ogg" => "oga",
            "video/ogg" => "ogv",
            "application/ogg" => "ogx",
            "font/otf" => "otf",
            "image/png" => "png",
            "application/pdf" => "pdf",
            "application/vnd.ms-powerpoint" => "ppt",
            "application/vnd.openxmlformats-officedocument.presentationml.presentation" => "pptx",
            "application/x-rar-compressed" => "rar",
            "application/rtf" => "rtf",
            "application/x-sh" => "sh",
            "image/svg+xml" => "svg",
            "application/x-shockwave-flash" => "swf",
            "application/x-tar" => "tar",
            "image/tiff" => "tiff",
            "application/typescript" => "ts",
            "font/ttf" => "ttf",
            "text/plain" => "txt",
            "application/vnd.visio" => "vsd",
            "audio/wav" => "wav",
            "audio/webm" => "weba",
            "video/webm" => "webm",
            "image/webp" => "webp",
            // FIX: the standard MIME for WOFF is font/woff; the legacy
            // "image/woff" key is kept for backward compatibility.
            "font/woff" => "woff",
            "image/woff" => "woff",
            "font/woff2" => "woff2",
            "application/xhtml+xml" => "xhtml",
            "application/vnd.ms-excel" => "xls",
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" => "xlsx",
            "application/xml" => "xml",
            "application/vnd.mozilla.xul+xml" => "xul",
            "application/zip" => "zip",
            "video/3gpp" => "3gp",
            "audio/3gpp" => "3gp",
            "video/3gpp2" => "3g2",
            "audio/3gpp2" => "3g2",
            "application/x-7z-compressed" => "7z",
        ];
        // FIX: an unknown MIME previously caused an undefined-index access;
        // fall back to a generic binary extension instead.
        return $extension_arr[$mime] ?? 'bin';
    }
}
<file_sep>/app/CrimeStatisticsAndProtectiveFacility.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * One yearly crime-statistics row: the crime group, the specific crime,
 * and the number of incidents recorded.
 */
class CrimeStatisticsAndProtectiveFacility extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['group_crime', 'crime', 'number', 'year'];
}
<file_sep>/app/CoastalResource.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly coastal-resource figures per municipality: municipal water area,
 * marine protected area (MPA) and its percentage.
 */
class CoastalResource extends Model
{
    /** @var array Columns open to mass assignment. */
    protected $fillable = ['municipality', 'municipal_water', 'mpa', 'mpa_percent', 'year'];

    /**
     * Fixed list of municipality ids this dataset covers.
     *
     * @return int[]
     */
    public static function municipality()
    {
        return [1, 2, 3, 4, 5, 7, 9, 10, 11, 12, 13, 14, 15];
    }
}
<file_sep>/app/AccessList.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\SoftDeletes;
/**
 * Soft-deletable access-control entry (key, display name, description).
 */
class AccessList extends Model
{
    use SoftDeletes;

    /** @var array Columns treated as dates (soft-delete timestamp). */
    protected $dates = ['deleted_at'];

    /** @var array Columns open to mass assignment. */
    protected $fillable = ['access_key', 'access_name', 'description'];
}
<file_sep>/app/Http/Controllers/LandAndOtherNaturalResourcesController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\LandAreaByMunicipality;
use App\Municipality;
use App\LandClassificationStatistic;
use App\GeographicalZoneSurfaces;
use App\NonMetallicMineral;
use App\MajorWatershed;
use App\CoastalResource;
use App\EstablishedMarineProtectedArea;
use App\ForestCover;
use App\MetallicMineralResource;
class LandAndOtherNaturalResourcesController extends Controller
{
    /**
     * Land area per municipality for the requested year (defaults to the
     * current ISO year), creating zeroed rows on first access.
     */
    public function getLandAreaByMunicipality(Request $request)
    {
        $year = date('o');
        if($request->has('year'))
        {
            $year = $request->input('year');
        }
        return $this->createLandAreaByMunicipality($year);
    }

    /**
     * Ensure a land-area row exists for every municipality in $year, then
     * return all rows joined with their municipality details.
     */
    public function createLandAreaByMunicipality($year)
    {
        $check = LandAreaByMunicipality::where('year', $year)->get();
        if(count($check) <= 0)
        {
            // First access for this year: seed one zeroed row per municipality.
            $municipality = Municipality::get();
            foreach ($municipality as $value) {
                LandAreaByMunicipality::create([
                    'municipality' => $value->id,
                    'land_area' => 0,
                    'year' => $year,
                ]);
            }
        }
        return LandAreaByMunicipality::where('year', $year)
            ->join('municipalities', 'municipalities.id', '=', 'land_area_by_municipalities.municipality')
            ->select('land_area_by_municipalities.*', 'municipalities.municipality', 'municipalities.district', "municipalities.id as municipality_id")
            ->get();
    }

    /**
     * Bulk-update land areas; payload is a 'data' array of {id, land_area}.
     * (Method name typo "Muniticipalty" is part of the public route surface
     * and is kept for backward compatibility.)
     */
    public function updateLandAreaMuniticipalty(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            LandAreaByMunicipality::where([
                'id' => $value['id'],
            ])
            ->update([
                'land_area' => $value['land_area']
            ]);
        }
        return 'success';
    }

    /**
     * Single land-classification record for the requested year (defaults to
     * the current ISO year), created with zeroes if absent.
     */
    public function getLandClassificationStatistics(Request $request)
    {
        $year = $request->has('year') ? $request->input('year') : date('o');
        $land = LandClassificationStatistic::where('year', $year)->first();
        if(is_null($land))
        {
            $land = $this->createLandClassificationStatistics($year);
        }
        return $land;
    }

    /** Create the zeroed land-classification record for $year. */
    public function createLandClassificationStatistics($year)
    {
        $land = LandClassificationStatistic::create([
            'certified_alienable_and_disposable_land' => 0,
            'forest_land_classified' => 0,
            'forest_land_unclassified' => 0,
            'year' => $year,
        ]);
        return $land;
    }

    /** Update the three land-classification figures of one record. */
    public function updateLandClassificationStatistics(Request $request)
    {
        $request->validate([
            'certified_alienable_and_disposable_land' => 'required',
            'forest_land_classified' => 'required',
            'forest_land_unclassified' => 'required',
        ]);
        LandClassificationStatistic::where('id', $request->input('id'))
            ->update([
                'certified_alienable_and_disposable_land' => $request->input('certified_alienable_and_disposable_land'),
                'forest_land_classified' => $request->input('forest_land_classified'),
                'forest_land_unclassified' => $request->input('forest_land_unclassified'),
            ]);
        return 'success';
    }

    /**
     * Single geographical-zone-surfaces record for the requested year
     * (defaults to the current ISO year), created with defaults if absent.
     */
    public function getGeographicalZoneSurfaces(Request $request)
    {
        $year = $request->has('year') ? $request->input('year') : date('o');
        $geographic = GeographicalZoneSurfaces::where('year', $year)->first();
        if(is_null($geographic))
        {
            $geographic = $this->createGeographicalZoneSurfaces($year);
        }
        return $geographic;
    }

    /** Create the geographical-zone record for $year with provincial defaults. */
    public function createGeographicalZoneSurfaces($year){
        $geo = GeographicalZoneSurfaces::create([
            'coastal_area' => '324.45 km.',
            'lake_areas' => '81.28 sq. km.',
            'total_land_mass' => '89% of its total land mass',
            'year' => $year,
        ]);
        return $geo;
    }

    /** Update the three geographical-zone figures of one record. */
    public function updateGeographicalZoneSurfaces(Request $request)
    {
        $request->validate([
            'coastal_area' => 'required',
            'lake_areas' => 'required',
            'total_land_mass' => 'required',
        ]);
        GeographicalZoneSurfaces::where('id', $request->input('id'))
            ->update([
                'coastal_area' => $request->input('coastal_area'),
                'lake_areas' => $request->input('lake_areas'),
                'total_land_mass' => $request->input('total_land_mass'),
            ]);
        return 'success';
    }

    /**
     * Ensure a non-metallic-mineral row exists per municipality for $year,
     * then return all rows joined with their municipality details.
     */
    public function createNonMetallicMineral($year)
    {
        $check_info = NonMetallicMineral::where('year', $year)->get();
        if(count($check_info) < 1)
        {
            $municipality = Municipality::get();
            foreach ($municipality as $value) {
                NonMetallicMineral::create([
                    'municipality' => $value->id,
                    'cubic_meter' => 0,
                    'amount' => 0,
                    'year' => $year
                ]);
            }
        }
        return NonMetallicMineral::where('year', $year)
            ->select('non_metallic_minerals.*','municipalities.municipality','municipalities.district',"municipalities.id as municipality_id")
            ->join('municipalities','municipalities.id','=','non_metallic_minerals.municipality')
            ->get();
    }

    /** Non-metallic minerals for the requested year (default: current ISO year). */
    public function getNonMetallicMineral(Request $request){
        if($request->has('year'))
        {
            return $this->createNonMetallicMineral($request->input('year'));
        }else{
            return $this->createNonMetallicMineral(date('o'));
        }
    }

    /** Bulk-update volume/amount; payload is a 'data' array of {id, cubic_meter, amount}. */
    public function updateNonMetallicMineral(Request $request)
    {
        $arr = $request->input('data');
        foreach ($arr as $value) {
            NonMetallicMineral::where('id', $value["id"])
                ->update([
                    'cubic_meter' => $value['cubic_meter'],
                    'amount' => $value['amount'],
                ]);
        }
        return 'success';
    }

    /** Major watersheds for the requested year (default: current ISO year). */
    public function getMajorWatershed(Request $request)
    {
        if($request->has('year'))
        {
            return MajorWatershed::where('year', $request->input('year'))->get();
        }else{
            return MajorWatershed::where('year', date('o'))->get();
        }
    }

    /** Create one major-watershed record; 'name' and 'year' are required. */
    public function createMajorWatershed(Request $request)
    {
        $request->validate([
            'name' => 'required',
            'year' => 'required',
        ]);
        MajorWatershed::create([
            'name' => $request->input('name'),
            'location' => $request->input('location'),
            'classification' => $request->input('classification'),
            'area' => $request->input('area'),
            'year' => $request->input('year'),
        ]);
        return 'success';
    }

    /** Update one major-watershed record by id (year is not changed). */
    public function updateMajorWatershed(Request $request)
    {
        MajorWatershed::where('id', $request->input('id'))
            ->update([
                'name' => $request->input('name'),
                'location' => $request->input('location'),
                'classification' => $request->input('classification'),
                'area' => $request->input('area'),
            ]);
        return 'success';
    }

    /** Delete one major-watershed record by id. */
    public function deleteMajorWatershed(Request $request)
    {
        MajorWatershed::where('id', $request->input('id'))->delete();
        return 'success';
    }

    /** Coastal resources for the requested year (default: current ISO year). */
    public function getCoastalResource(Request $request)
    {
        if($request->has('year'))
        {
            return $this->createCoastalResource($request->input('year'));
        }else{
            return $this->createCoastalResource(date('o'));
        }
    }

    /**
     * Seed zeroed coastal-resource rows for every coastal municipality the
     * first time $year is requested, then return the joined result set.
     */
    public function createCoastalResource($year)
    {
        $coastal_resources = $this->coastalResourceQuery($year);
        if(count($coastal_resources) <= 0)
        {
            $municipality = CoastalResource::municipality();
            foreach ($municipality as $value) {
                CoastalResource::create([
                    'municipality' => $value,
                    'municipal_water' => 0,
                    'mpa' => 0,
                    'mpa_percent' => 0,
                    'year' => $year,
                ]);
            }
            // Re-run the query so the freshly-seeded rows are returned.
            $coastal_resources = $this->coastalResourceQuery($year);
        }
        return $coastal_resources;
    }

    /** Coastal-resource rows for $year joined with their municipality names. */
    private function coastalResourceQuery($year)
    {
        return CoastalResource::where('year', $year)
            ->join('municipalities','municipalities.id','=','coastal_resources.municipality')
            ->select('coastal_resources.*', 'municipalities.municipality','municipalities.id as municipality_id')
            ->get();
    }

    /** Bulk-update coastal resources; payload is a 'data' array of row values. */
    public function updateCoastalResource(Request $request)
    {
        $arr = $request->input('data');
        foreach ($arr as $value) {
            CoastalResource::where('id', $value["id"])
                ->update([
                    'municipal_water' =>$value["municipal_water"],
                    'mpa' =>$value["mpa"],
                    'mpa_percent' =>$value["mpa_percent"],
                ]);
        }
        return 'success';
    }

    /**
     * Marine protected areas grouped by municipality name.
     *
     * NOTE(review): the year filter is commented out in the original code, so
     * every year is returned regardless of the 'year' parameter — confirm
     * whether this is intentional before re-enabling it.
     */
    public function getEstablishedMarineProtectedArea(Request $request)
    {
        $arr = EstablishedMarineProtectedArea::join('municipalities', 'municipalities.id', '=', 'established_marine_protected_areas.municipality')
            ->select('established_marine_protected_areas.*', 'municipalities.municipality as municipality_name')
            // ->where('year', $request->input('year'))
            ->get();
        $data = [];
        foreach ($arr as $value) {
            $data[$value->municipality_name][] = [
                'id' => $value->id,
                'municipality' => $value->municipality,
                'name_of_mpa' => $value->name_of_mpa,
                'location' => $value->location,
                'year_stablished' => $value->year_stablished,
                'estimated_area' => $value->estimated_area,
                'year' => $value->year,
            ];
        }
        return $data;
    }

    /** Create one MPA record; 'year_stablished' defaults to 2019 when absent. */
    public function createEstablishedMarineProtectedArea(Request $request)
    {
        EstablishedMarineProtectedArea::create([
            'municipality' => $request->input('municipality'),
            'name_of_mpa' => $request->input('name_of_mpa'),
            'location' => $request->input('location'),
            'year_stablished' => $request->input('year_stablished') ?? 2019,
            'estimated_area' => $request->input('estimated_area'),
            'year' => $request->input('year'),
        ]);
        return 'success';
    }

    /** Update one MPA record by id. */
    public function updateEstablishedMarineProtectedArea(Request $request)
    {
        EstablishedMarineProtectedArea::where('id', $request->input('id'))
            ->update([
                'municipality' => $request->input('municipality'),
                'name_of_mpa' => $request->input('name_of_mpa'),
                'location' => $request->input('location'),
                'year_stablished' => $request->input('year_stablished'),
                'estimated_area' => $request->input('estimated_area'),
                'year' => $request->input('year'),
            ]);
        // Fix: this endpoint previously returned nothing (null response body),
        // unlike every sibling update endpoint in this controller.
        return 'success';
    }

    /** Delete one MPA record by id. */
    public function deleteEstablishedMarineProtectedArea(Request $request)
    {
        EstablishedMarineProtectedArea::where('id', $request->input('id'))->delete();
        return 'success';
    }

    /** Forest cover for the requested year (default: current ISO year). */
    public function getForestCover(Request $request)
    {
        $year = date('o');
        if($request->has('year'))
        {
            $year = $request->input('year');
        }
        return $this->createForestCover($year);
    }

    /**
     * Seed one forest-cover row per MIMAROPA province the first time $year is
     * requested, then return all rows for that year.
     */
    public function createForestCover($year)
    {
        $count = ForestCover::where('year', $year)->get();
        if(count($count) <= 0)
        {
            // NOTE(review): "Occidental Mindoro " has a trailing space in the
            // original seed data; preserved to match existing records.
            $arr = ["Marinduque", "Occidental Mindoro ", "Oriental Mindoro", "Palawan","Romblon"];
            foreach ($arr as $value) {
                ForestCover::create([
                    'province' => $value,
                    'year' => $year,
                ]);
            }
        }
        return ForestCover::where('year', $year)->get();
    }

    /** Bulk-update forest-cover figures; payload is a 'data' array of row values. */
    public function updateForestCover(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            ForestCover::where('id', $value["id"])
                ->update([
                    'closed_forest' => $value["closed_forest"],
                    'open_forest' => $value["open_forest"],
                    'mangrove' => $value["mangrove"],
                ]);
        }
        // Fix: previously returned nothing, unlike the other update endpoints.
        return 'success';
    }

    /** Metallic mineral resources for the requested year (default: current ISO year). */
    public function getMetallicMineralResource(Request $request)
    {
        $year = date('o');
        if($request->has('year'))
        {
            $year = $request->input('year');
        }
        return MetallicMineralResource::where('year', $year)->get();
    }

    /** Create one metallic-mineral record. */
    public function createMetallicMineralResource(Request $request)
    {
        MetallicMineralResource::create([
            'mineral' => $request->input('mineral'),
            'municipality' => $request->input('municipality'),
            'year' => $request->input('year'),
        ]);
        return 'success';
    }

    /** Update one metallic-mineral record by id (year is not changed). */
    public function updateMetallicMineralResource(Request $request)
    {
        MetallicMineralResource::where('id', $request->input('id'))
            ->update([
                'mineral' => $request->input('mineral'),
                'municipality' => $request->input('municipality'),
            ]);
        return 'success';
    }

    /** Delete one metallic-mineral record by id. */
    public function deleteMetallicMineralResource(Request $request)
    {
        MetallicMineralResource::where('id', $request->input('id'))->delete();
        return 'success';
    }
}
<file_sep>/app/VegetableProduction.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class VegetableProduction extends Model
{
    /**
     * Yearly vegetable-production figures per commodity.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'commodity',
        'area_harvested',
        'production_mt',   // production in metric tons
        'arerage_yield',   // NOTE: misspelling of "average_yield" matches the DB column — do not "fix" here
        'year',
    ];
}
<file_sep>/database/seeds/UsersTableSeeder.php
<?php
use Illuminate\Database\Seeder;
use App\User;
use App\GeneralInformation;
use App\Municipality;
use App\PersonnelByOffice;
use App\ImplementorAgencyList;
use App\MonitoredByList;
class UsersTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Seeds the baseline reference data: project monitors, implementing
     * agencies, the provincial general-information record, the initial
     * administrator account, and zero-count personnel rows per office.
     *
     * @return void
     */
    public function run()
    {
        // People/teams that can be assigned as project monitors.
        $monitored_list = [
            ["monitor" => "<NAME>, PEO IV"],
            ["monitor" => "<NAME>, PEO III"],
            ["monitor" => "<NAME>, PEO II"],
            ["monitor" => "<NAME>, PEO I"],
            ["monitor" => "Provincial Project Monitoring Team"],
        ];
        // Agencies that can implement provincial projects.
        $implementor = [
            [ "implementor" => "Local Government Unit"],
            [ "implementor" => "Others"],
            [ "implementor" => "Provincial Engineering Office"],
            [ "implementor" => "Provincial Social Welfare and Development Office"],
            [ "implementor" => "Provincial Veterinary Office"],
            [ "implementor" => "Provincial Health Office"],
            [ "implementor" => "Provincial Agriculture Office"],
            [ "implementor" => "GO-Education, Employment Services Division"],
            [ "implementor" => "GO-SICAD"],
            [ "implementor" => "PG-Environment and Natural Resources Office"],
            [ "implementor" => "Provincial Tourism Investments and Enterprise Development Office"],
            [ "implementor" => "DPWH-Southern Mindoro District Engineering Office"],
            [ "implementor" => "DPWH-Mindoro Oriental District Engineering Office"],
            [ "implementor" => "Department of the Interior and Local Government"],
        ];
        // Single provincial profile record shown on the public site.
        GeneralInformation::create([
            'location' => 'Oriental Mindoro is located in MIMAROPA Region. It lies 45 kilometers south of Batangas and 130 kilometers south of Manila.',
            'topography' => 'Oriental Mindoro has a rugged terrain and an irregular coastline. Numerous rivers and streams traverse the province but none are navigable by large vessels.',
            'land_area' => 'Oriental Mindoro has a total land area of 4,364.72 square kilometers or 436,472 hectares. It represents 1.5 percent of the total land area of the country and 16 percent of the MIMAROPA Region',
            'boundary' => 'It is bounded by Verde Island Passage of the North; Maestro del Campo Island and Tablas Strait on the East; Semirara Island on the South; and Occidental Mindoro on the West.',
            'climate' => 'The province has two climate types: Type I and Type III. Type I is characterized by two pronounced seasons, dry and wet. Type III has no pronounced season, relatively dry from November to April and wet during the rest of the year.',
            'capital' => 'Calapan City',
            'date_of_approval' => '1950-11-15',
            'legal_basis' => 'RA 505 (Division of Mindoro)',
            'no_of_component_city' => '1',
            'no_of_municipality' => '14',
            'no_of_congressional_district' => '2',
            'no_of_barangay' => '426',
        ]);
        // Initial administrator account (role 1 = admin).
        // NOTE(review): the credential values appear redacted in this source.
        $user = [
            'email' => '<EMAIL>',
            'name' => '<NAME>',
            'password' => <PASSWORD>('<PASSWORD>'),
            'role' => 1,
        ];
        // Zero-count personnel rows per office for the current ISO year.
        // Fix: this list previously duplicated PersonnelByOffice::getDefault()
        // verbatim (30 entries); reuse the model's canonical list instead so
        // the two copies cannot drift apart.
        $personnelbyoffice = PersonnelByOffice::getDefault(date('o'));
        PersonnelByOffice::insert($personnelbyoffice);
        ImplementorAgencyList::insert($implementor);
        User::insert($user);
        MonitoredByList::insert($monitored_list);
    }
}
<file_sep>/database/migrations/2018_12_06_095431_create_urban_rural_populations_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateUrbanRuralPopulationsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the per-municipality urban/rural population table: barangay
     * counts plus population figures for two census years, linked to a
     * parent urban_rural_population_infos record.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('urban_rural_populations', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('urban_rural_population_infos_id'); // parent info record (holds the census-year pair)
            $table->integer('municipality');                    // municipalities.id
            $table->integer('no_of_barangays_urban');
            $table->integer('no_of_barangays_rural');
            $table->integer('population_urban_year_1');
            $table->integer('population_rural_year_1');
            $table->integer('population_urban_year_2');
            $table->integer('population_rural_year_2');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('urban_rural_populations');
    }
}
<file_sep>/app/AncestralDomain.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class AncestralDomain extends Model
{
    /**
     * Ancestral-domain record: an indigenous community's area for a given
     * statistical year.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'name_address',     // name/address of the ancestral domain
        'ethnolinguistic',  // ethnolinguistic group
        'estimated_area',
        'year',
    ];
}
<file_sep>/database/migrations/2018_12_05_110448_create_population_densities_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreatePopulationDensitiesTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the per-municipality population-density table (gross and
     * urban densities for two reference points plus an estimate), linked to
     * a parent population_density_infos record.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('population_densities', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('population_density_infos_id'); // parent info record
            $table->integer('municipality');                // municipalities.id
            $table->integer('gross_old')->default(0);
            $table->integer('gross_actual')->default(0);
            $table->integer('urban_old')->default(0);
            $table->integer('urban_actual')->default(0);
            $table->integer('estimated_gross')->default(0);
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('population_densities');
    }
}
<file_sep>/app/EstablishedMarineProtectedArea.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class EstablishedMarineProtectedArea extends Model
{
    /**
     * Marine protected area (MPA) established within a municipality.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'municipality',     // municipalities.id
        'name_of_mpa',
        'location',
        'year_stablished',  // NOTE: misspelling of "established" matches the DB column — do not "fix" here
        'estimated_area',
        'year',             // statistical year of the record
    ];
}
<file_sep>/app/HouseholdPopulationBySex.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class HouseholdPopulationBySex extends Model
{
    /**
     * Per-municipality household population broken down by sex.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'municipality', // municipalities.id
        'female',
        'male',
        'year',
    ];
}
<file_sep>/app/FirePrevention.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class FirePrevention extends Model
{
    /**
     * Per-municipality fire-prevention capability: station, truck, and
     * personnel counts by operator (BFP / LGU / NGO).
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'municipality',              // municipalities.id
        'no_of_fire_station_bfp',    // Bureau of Fire Protection stations
        'no_of_fire_station_lgu',    // local-government stations
        'no_of_fire_trucks_bfp',
        'no_of_fire_trucks_lgu',
        'no_of_fire_trucks_ngo',
        'no_of_personnel',
        'year',
    ];
}
<file_sep>/app/HouseholdPopulationBySourceOfDrinkingWater.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class HouseholdPopulationBySourceOfDrinkingWater extends Model
{
    /**
     * Household population by source of drinking water for a given year.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'source_of_drinking', // e.g. piped, well, spring
        'magnitude',          // number of households
        'proportion',         // share of total households
        'year',
    ];
}
<file_sep>/app/Imports/AddressImport.php
<?php
namespace App\Imports;
use Illuminate\Support\Collection;
use Maatwebsite\Excel\Concerns\ToCollection;
use App\Municipality;
use App\Barangay;
class AddressImport implements ToCollection
{
    /**
     * Import PSGC address rows from a spreadsheet.
     *
     * Expected columns: [0] PSGC code, [1] name, [2] geographic level
     * ("Mun" for municipality, "Bgy" for barangay), [3] municipality id.
     * Only barangay rows are persisted; "Mun" rows are skipped (the original
     * code had an intentionally empty branch for them — municipalities are
     * presumably seeded elsewhere).
     *
     * @param Collection $collection
     */
    public function collection(Collection $collection)
    {
        foreach ($collection as $row) {
            // Fix: replaced the empty if ($row[2] == "Mun") {} branch with a
            // single positive condition; behavior is unchanged.
            if ($row[2] == "Bgy") {
                Barangay::create([
                    'barangay' => $row[1],
                    'psgc' => $row[0],
                    'municipality_id' => $row[3],
                ]);
            }
        }
    }
}
<file_sep>/database/migrations/2018_11_28_031906_create_forest_covers_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateForestCoversTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the per-province forest-cover table (closed forest, open
     * forest, and mangrove areas) per statistical year.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('forest_covers', function (Blueprint $table) {
            $table->increments('id');
            $table->string('province');
            $table->integer('closed_forest')->default(0);
            $table->integer('open_forest')->default(0);
            $table->integer('mangrove')->default(0);
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('forest_covers');
    }
}
<file_sep>/database/migrations/2018_11_13_052415_create_performance_indicator_in_public_schools_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreatePerformanceIndicatorInPublicSchoolsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the public-school performance-indicator table: one decimal
     * value per indicator for province/Calapan City, split by
     * elementary/secondary level, per year.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('performance_indicator_in_public_schools', function (Blueprint $table) {
            $table->increments('id');
            $table->string('indicator'); // e.g. enrolment rate, completion rate
            $table->decimal('province_elementary');
            $table->decimal('province_secondary');
            $table->decimal('calapan_elementary');
            $table->decimal('calapan_secondary');
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('performance_indicator_in_public_schools');
    }
}
<file_sep>/app/PersonnelByOffice.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class PersonnelByOffice extends Model
{
    /**
     * Plantilla vs filled-up personnel counts per provincial office.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'name_of_office',
        'no_plantilla_position',
        'no_of_filled_up_position',
        'group',  // display/grouping bucket (offices sharing a department share a group)
        'year',   // fix: was missing although getDefault() rows carry it, so create() silently dropped the year
    ];

    /**
     * Default (zero-count) personnel rows, one per provincial office.
     *
     * Fix: the original hand-wrote thirty near-identical array literals;
     * the rows are now generated from a compact [name, group] list, which
     * produces the same key/value pairs for every entry.
     *
     * @param int|string $year statistical year stamped on every row
     * @return array
     */
    public static function getDefault($year)
    {
        // Office name and display group. Trailing spaces in some names are
        // deliberate — they match the strings already stored in the database.
        $offices = [
            ["Office of the Governor", 1],
            ["Management Support Staff Division", 1],
            ["Internal Audit Service Division", 1],
            ["Special Concerns Division", 1],
            ["Disaster Risk Reduction and Management Division", 1],
            ["Communication and Public Relations Division", 1],
            ["Education and Employment Services Division", 1],
            ["Office of the Vice-Governor and Sangguniang Panlalawigan ", 2],
            ["Provincial Administrator’s Office", 3],
            ["Provincial Planning and Development Office ", 4],
            ["Provincial Treasurer’s Office", 5],
            ["Provincial Accounting Office", 6],
            ["Provincial Budget Office", 7],
            ["Provincial Assessor’s Office", 8],
            ["Provincial Human Resource Management Office", 9],
            ["Provincial General Services Office", 10],
            ["Provincial Legal Office", 11],
            ["Provincial Social Welfare and Development Office", 12],
            ["Provincial Health Office", 13],
            ["Oriental Mindoro Provincial Hospital", 13],
            ["Oriental Mindoro Central District Hospital", 13],
            ["Naujan Community Hospital", 13],
            ["Bulalacao Community Hospital", 13],
            ["Oriental Mindoro Southern District Hospital", 13],
            ["Provincial Engineering Office", 14],
            ["Provincial Agriculture Office", 15],
            ["Provincial Veterinary Office", 16],
            ["Environment and Natural Resource Office", 17],
            ["Provincial Tourism, Investments and Enterprise Development Office ", 18],
            ["Botika ng Lalawigan ng Oriental Mindoro ", 19],
        ];
        $rows = [];
        foreach ($offices as [$name, $group]) {
            $rows[] = [
                'name_of_office' => $name,
                'no_plantilla_position' => 0,
                'no_of_filled_up_position' => 0,
                'group' => $group,
                'year' => $year,
            ];
        }
        return $rows;
    }
}
<file_sep>/database/migrations/2018_10_30_073208_create_coconut_productions_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateCoconutProductionsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the per-municipality coconut-production table: farmer/area/
     * tree counts and yield figures per statistical year.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('coconut_productions', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('municipality'); // municipalities.id
            $table->integer('no_of_coconut_farmers')->default(0);
            $table->integer('coconut_area')->default(0);
            $table->integer('no_of_coco_trees')->default(0);
            $table->integer('non_bearing')->default(0);      // trees not yet bearing fruit
            $table->integer('bearing')->default(0);          // fruit-bearing trees
            $table->integer('nut_tree_year')->default(0);    // nuts per tree per year
            $table->integer('wholenuts')->default(0);
            $table->integer('copra_equivalent')->default(0);
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('coconut_productions');
    }
}
<file_sep>/app/UserAccess.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\SoftDeletes;
class UserAccess extends Model
{
    /**
     * Pivot-style record granting one AccessList right to one user.
     *
     * Rows are soft-deleted: 'deleted_at' is stamped instead of removing them.
     */
    use SoftDeletes;
    protected $dates = ['deleted_at'];
    // Mass-assignable attributes.
    protected $fillable = [
        'user_id',
        'access_id'  // access_lists.id
    ];
    /**
     * The access-right definition this grant refers to.
     *
     * @return \Illuminate\Database\Eloquent\Relations\HasOne
     */
    public function access()
    {
        return $this->hasOne('App\AccessList','id','access_id');
    }
}
<file_sep>/app/AccommodationEstablishment.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class AccommodationEstablishment extends Model
{
    /**
     * Per-municipality tourism accommodation statistics for a given year.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'municipality',                        // municipalities.id
        'no_of_accommodation_establishment',
        'no_of_room',
        'bedding_capacity',
        'total_employment',
        'year'
    ];
}
<file_sep>/app/RiceProduction.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class RiceProduction extends Model
{
    /**
     * Per-municipality rice-production figures for a given year.
     *
     * Mass-assignable attributes.
     */
    protected $fillable = [
        'municipality',   // municipalities.id
        'no_of_farmers',
        'area_harvested',
        'production_mt',  // production in metric tons
        'arerage_yield',  // NOTE: misspelling of "average_yield" matches the DB column — do not "fix" here
        'year',
    ];
}
<file_sep>/database/migrations/2018_09_26_080139_create_general_informations_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateGeneralInformationsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the single-row provincial profile table (location, topography,
     * administrative counts, etc.) shown on the public site.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('general_informations', function (Blueprint $table) {
            $table->increments('id');
            $table->string('location')->nullable();
            $table->string('topography')->nullable();
            $table->string('land_area')->nullable();
            $table->string('boundary')->nullable();
            $table->string('climate')->nullable();
            $table->string('capital')->nullable();
            $table->date('date_of_approval')->nullable(); // date the province's legal basis was approved
            $table->string('legal_basis')->nullable();
            $table->integer('no_of_component_city')->default(0);
            $table->integer('no_of_municipality')->default(0);
            $table->integer('no_of_congressional_district')->default(0);
            $table->integer('no_of_barangay')->default(0);
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('general_informations');
    }
}
<file_sep>/app/Http/Controllers/FinancialProfileController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\AnnualIncomeBudget;
use App\Municipality;
use App\IncomeAndExpenditure;
/**
 * Endpoints for the provincial financial profile: the per-municipality
 * annual income/budget table and the province-wide income & expenditure
 * summary. Rows for a requested year are lazily created (zero-filled)
 * the first time that year is read.
 */
class FinancialProfileController extends Controller
{
    /**
     * Annual income/budget rows for the requested year.
     *
     * @param  Request $request optional 'year'; defaults to the current calendar year
     */
    public function getAnnualIncomeBudget(Request $request)
    {
        if ($request->has('year')) {
            return $this->createAnnualIncomeBudget($request->input('year'));
        }
        // Fix: date('Y') is the calendar year. The previous date('o') is the
        // ISO-8601 week-numbering year, which disagrees with the calendar
        // year for a few days around New Year.
        return $this->createAnnualIncomeBudget(date('Y'));
    }
    /**
     * Ensure a zero-filled row exists for every municipality for $year,
     * then return the rows joined with their municipality records.
     */
    public function createAnnualIncomeBudget($year)
    {
        $annualincomebudget = AnnualIncomeBudget::where('year', $year)->get();
        if (count($annualincomebudget) <= 0) {
            foreach (Municipality::get() as $value) {
                AnnualIncomeBudget::create([
                    'municipality' => $value->id,
                    'income' => 0,
                    'budget' => 0,
                    'year' => $year,
                ]);
            }
        }
        return AnnualIncomeBudget::where('year', $year)
            ->join('municipalities', 'municipalities.id', '=', 'annual_income_budgets.municipality')
            ->get();
    }
    /**
     * Bulk-update income/budget figures. Expects 'data' as an array of rows
     * each carrying id, income, budget and year.
     */
    public function updateAnnualIncomeBudget(Request $request)
    {
        $arr = $request->input('data');
        foreach ($arr as $value) {
            AnnualIncomeBudget::where('id', $value['id'])->update([
                'income' => $value['income'],
                'budget' => $value['budget'],
                'year' => $value['year'],
            ]);
        }
        return 'success';
    }
    /**
     * Income & expenditure summary for the requested year (single row per
     * year, zero-filled on first access).
     */
    public function getIncomeAndExpenditure(Request $request)
    {
        if ($request->has('year')) {
            return $this->createIncomeAndExpenditure($request->input('year'));
        }
        // Fix: calendar year instead of ISO week-numbering year (see above).
        return $this->createIncomeAndExpenditure(date('Y'));
    }
    /**
     * Return the income/expenditure row for $year, creating a zero-filled
     * one if it does not exist yet.
     */
    public function createIncomeAndExpenditure($year)
    {
        $check = IncomeAndExpenditure::where('year', $year)->first();
        if (!isset($check)) {
            $check = IncomeAndExpenditure::create([
                'income_general_fund' => 0,
                'expenditures_obligations' => 0,
                'tax_revenue' => 0,
                'permit_and_license' => 0,
                'service_income' => 0,
                'business_income' => 0,
                'other_income' => 0,
                'grants_and_donation' => 0,
                'personal_services' => 0,
                'mooe' => 0,
                'capitals_outlays' => 0,
                'year' => $year,
            ]);
        }
        return $check;
    }
    /**
     * Update the income/expenditure row identified by 'id'.
     * NOTE(review): 'year' is intentionally not updatable here — the row's
     * year is fixed at creation. Confirm this is the intended contract.
     */
    public function updateIncomeAndExpenditure(Request $request)
    {
        $check = IncomeAndExpenditure::where('id', $request->input('id'))
            ->update([
                'income_general_fund' => $request->input('income_general_fund'),
                'expenditures_obligations' => $request->input('expenditures_obligations'),
                'tax_revenue' => $request->input('tax_revenue'),
                'permit_and_license' => $request->input('permit_and_license'),
                'service_income' => $request->input('service_income'),
                'business_income' => $request->input('business_income'),
                'other_income' => $request->input('other_income'),
                'grants_and_donation' => $request->input('grants_and_donation'),
                'personal_services' => $request->input('personal_services'),
                'mooe' => $request->input('mooe'),
                'capitals_outlays' => $request->input('capitals_outlays'),
            ]);
        return 'success';
    }
}
<file_sep>/app/Http/Controllers/InstitutionalProfileController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\PersonnelByOffice;
use App\PersonnelByStatus;
use App\WaterSystem;
/**
 * Endpoints for the institutional profile: government personnel counts by
 * office and by employment status. Per-year rows are lazily created with
 * defaults the first time a year is requested.
 */
class InstitutionalProfileController extends Controller
{
    /**
     * Personnel-by-office rows for the requested year, grouped by office
     * group (see organizeArr for the output shape).
     */
    public function getPersonnelByOffice(Request $request)
    {
        if ($request->has('year')) {
            return $this->organizeArr($this->createPersonnelByOffice($request->input('year')));
        }
        // Fix: date('Y') = calendar year; the previous date('o') is the
        // ISO-8601 week-numbering year (wrong around New Year).
        return $this->organizeArr($this->createPersonnelByOffice(date('Y')));
    }
    /**
     * Seed the default office rows for $year if none exist, then return
     * all rows for that year.
     */
    public function createPersonnelByOffice($year)
    {
        $arr = PersonnelByOffice::where('year', $year)->get();
        if (count($arr) <= 0) {
            PersonnelByOffice::insert(PersonnelByOffice::getDefault($year));
        }
        return PersonnelByOffice::where('year', $year)->get();
    }
    /**
     * Reshape a flat row collection into [group => [row, ...]] arrays for
     * the front end.
     */
    public function organizeArr($arr)
    {
        $array = [];
        foreach ($arr as $value) {
            $array[$value->group][] = [
                'id' => $value->id,
                'name_of_office' => $value->name_of_office,
                'no_plantilla_position' => $value->no_plantilla_position,
                'no_of_filled_up_position' => $value->no_of_filled_up_position,
                'group' => $value->group,
                'year' => $value->year,
            ];
        }
        return $array;
    }
    /**
     * Bulk-update office rows. 'data' is keyed by group, each group holding
     * rows with id plus the editable columns.
     */
    public function updatePersonnelByOffice(Request $request)
    {
        $arr = $request->input('data');
        foreach ($arr as $value) {
            foreach ($value as $final_data) {
                PersonnelByOffice::where('id', $final_data['id'])->update([
                    'name_of_office' => $final_data['name_of_office'],
                    'no_plantilla_position' => $final_data['no_plantilla_position'],
                    'no_of_filled_up_position' => $final_data['no_of_filled_up_position'],
                    'group' => $final_data['group'],
                    'year' => $final_data['year'],
                ]);
            }
        }
        return 'success';
    }
    /**
     * Personnel-by-status row for the requested year (defaults to the
     * current calendar year).
     */
    public function getPersonnelByStatus(Request $request)
    {
        if ($request->has('year')) {
            return $this->createPersonnelByStatus($request->input('year'));
        }
        // Fix: calendar year instead of ISO week-numbering year (see above).
        return $this->createPersonnelByStatus(date('Y'));
    }
    /**
     * Return the personnel-by-status row for $year, creating a zero-filled
     * one if missing.
     */
    public function createPersonnelByStatus($year)
    {
        $check = PersonnelByStatus::where('year', $year)->first();
        if (!isset($check)) {
            $check = PersonnelByStatus::create([
                'permanent_first_male' => 0,
                'permanent_first_female' => 0,
                'permanent_second_male' => 0,
                'permanent_second_female' => 0,
                'co_terminous_first_male' => 0,
                'co_terminous_first_female' => 0,
                'co_terminous_second_male' => 0,
                'co_terminous_second_female' => 0,
                'elective_male' => 0,
                'elective_female' => 0,
                'year' => $year,
            ]);
        }
        return $check;
    }
    /**
     * Update the personnel-by-status row identified by 'id'.
     */
    public function updatePersonnelByStatus(Request $request)
    {
        PersonnelByStatus::where('id', $request->input('id'))
            ->update([
                'permanent_first_male' => $request->input('permanent_first_male'),
                'permanent_first_female' => $request->input('permanent_first_female'),
                'permanent_second_male' => $request->input('permanent_second_male'),
                'permanent_second_female' => $request->input('permanent_second_female'),
                'co_terminous_first_male' => $request->input('co_terminous_first_male'),
                'co_terminous_first_female' => $request->input('co_terminous_first_female'),
                'co_terminous_second_male' => $request->input('co_terminous_second_male'),
                'co_terminous_second_female' => $request->input('co_terminous_second_female'),
                'elective_male' => $request->input('elective_male'),
                'elective_female' => $request->input('elective_female'),
            ]);
        // Fix: report completion like every other update endpoint in this
        // controller family (the original returned nothing).
        return 'success';
    }
}
<file_sep>/app/NutritionalStatus.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for child nutritional-status counts: one row per age
 * range per year, with boy/girl counts for each weight classification.
 */
class NutritionalStatus extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'age_range',
        'normal_boys',
        'normal_girls',
        'underweight_boys',
        'underweight_girls',
        'severely_boys',
        'severely_girls',
        'overweight_boys',
        'overweight_girls',
        'year',
    ];
}
<file_sep>/app/CoconutProduction.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for coconut-industry statistics: one row per municipality
 * per year (see the coconut_productions migration for column defaults).
 */
class CoconutProduction extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'municipality',
        'no_of_coconut_farmers',
        'coconut_area',
        'no_of_coco_trees',
        'non_bearing',
        'bearing',
        'nut_tree_year',
        'wholenuts',
        'copra_equivalent',
        'year',
    ];
}
<file_sep>/app/HealthPersonnel.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for health-personnel counts: one row per personnel type
 * (physician, nurse, ...) per year.
 */
class HealthPersonnel extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'personnel',
        'number',
        'year'
    ];
}
<file_sep>/app/DimensionsOfPoverty.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for CBMS poverty-dimension figures: one row per indicator
 * per year, with household and population magnitude/proportion breakdowns.
 */
class DimensionsOfPoverty extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'indicator',
        'household_magnitude',
        'household_proportion',
        'population_magnitude_total',
        'population_magnitude_male',
        'population_magnitude_female',
        'population_proportion_total',
        'population_proportion_male',
        'population_proportion_female',
        'year',
    ];
}
<file_sep>/app/HighValueCommercialCrop.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for high-value commercial crop statistics: one row per
 * commodity per year.
 */
class HighValueCommercialCrop extends Model
{
    // Mass-assignable columns.
    protected $fillable =[
        'commodity',
        'farmers_served',
        'no_of_trees_planted',
        'planted_area',
        'production',
        'average_yield',
        'year'
    ];
}
<file_sep>/app/LandAreaByMunicipality.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for land area per municipality per year.
 */
class LandAreaByMunicipality extends Model
{
    // Mass-assignable columns.
    protected $fillable = ['municipality','land_area','year'];
}
<file_sep>/app/ProjectedPopulation.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for population projections: two actual census figures,
 * a growth rate, and three projected years, linked to a
 * projected_population_infos header row.
 */
class ProjectedPopulation extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'projected_population_infos_id',
        'province',
        'actual_year_1',
        'actual_year_2',
        'growth_rate',
        'projection_year_1',
        'projection_year_2',
        'projection_year_3',
    ];
}
<file_sep>/app/CityMunicipality.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Read-only reference model over the PSGC city/municipality lookup table.
 */
class CityMunicipality extends Model
{
    // Non-conventional table name, so it is set explicitly.
    protected $table = "ref_citymun";
}
<file_sep>/app/StatusOfPower.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for rural-electrification status: one row per
 * municipality per year.
 */
class StatusOfPower extends Model
{
    // Mass-assignable columns.
    // NOTE(review): 'sitios_unerginized' looks like a misspelling of
    // 'sitios_unenergized' but presumably matches the actual DB column —
    // confirm against the migration before renaming.
    protected $fillable = [
        'municipality',
        'barangay_covered',
        'barangay_energized',
        'sitios_energized',
        'sitios_unerginized',
        'house_connections',
        'members_approved',
        'year',
    ];
}
<file_sep>/database/migrations/2018_10_15_052746_create_personnel_by_statuses_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreatePersonnelByStatusesTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates `personnel_by_statuses`: one row per year counting government
     * personnel by employment status (permanent/co-terminous by level,
     * plus elective), split by sex. Unlike sibling tables, the count
     * columns carry no ->default(0); InstitutionalProfileController
     * supplies zeros explicitly on create.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('personnel_by_statuses', function (Blueprint $table) {
            $table->increments('id');
            $table->integer('permanent_first_male');
            $table->integer('permanent_first_female');
            $table->integer('permanent_second_male');
            $table->integer('permanent_second_female');
            $table->integer('co_terminous_first_male');
            $table->integer('co_terminous_first_female');
            $table->integer('co_terminous_second_male');
            $table->integer('co_terminous_second_female');
            $table->integer('elective_male');
            $table->integer('elective_female');
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('personnel_by_statuses');
    }
}
<file_sep>/app/Http/Controllers/MailController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Mail;
use App\Mail\UserVerification;
/**
 * Diagnostic controller for verifying that outbound mail is configured.
 */
class MailController extends Controller
{
    /**
     * Send one UserVerification message to a fixed address and report
     * completion. Intended for manual smoke-testing only.
     */
    public function test_mail()
    {
        $recipient = '<EMAIL>';
        $message = new UserVerification();
        Mail::to($recipient)->send($message);
        echo "mail sent";
    }
}
<file_sep>/database/migrations/2018_12_06_095443_create_urban_rural_population_infos_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateUrbanRuralPopulationInfosTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates `urban_rural_population_infos`: a header row naming the two
     * census years a population data set compares, tagged with the year
     * it was encoded.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('urban_rural_population_infos', function (Blueprint $table) {
            $table->increments('id');
            $table->year('population_year_1');
            $table->year('population_year_2');
            $table->year('year_of_encoding');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('urban_rural_population_infos');
    }
}
<file_sep>/app/Http/Controllers/UserController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\User;
use App\AccessList;
use App\UserAccess;
use Auth;
use Hash;
/**
 * User account management: current-user info, account creation, password
 * changes, and the access-control list (AccessList / UserAccess pivot).
 *
 * NOTE(review): several literals below appear as '<PASSWORD>'/'<EMAIL>'
 * placeholders — the original values were redacted; restore before use.
 */
class UserController extends Controller
{
    // Currently authenticated user (eager access-list variant left disabled).
    public function getAuth()
    {
        return Auth::user();
        // return User::where('id', Auth::user()->id)->with('access')->first();
    }
    /**
     * Create a new account.
     * NOTE(review): the 'username' input is stored in the users.email
     * column (and uniqueness is checked against users.email) — confirm
     * this is intentional.
     */
    public function create(Request $request, User $user)
    {
        // validate form information
        $validate = $request->validate([
            'password' => '<PASSWORD>|<PASSWORD>|min:6|max:255',
            'username' => 'required|unique:users,email|min:3|max:255',
            'name' => 'required|min:3|max:255',
        ]);
        // add user to our database
        $create = $user::create([
            'email' => $request->input('username'),
            'password' => <PASSWORD>($request->input('password')),
            'name' => $request->input('name'),
        ]);
        return $this->returnMessage($create, 'success', 'error');
    }
    /**
     * Change the authenticated user's password after verifying the old
     * one with Hash::check. Returns a type/message array for the UI.
     */
    public function changePassword(Request $request)
    {
        $id = Auth::user()->id;
        $user = User::where('id', $id)->first();
        $password = $user->password;
        $request->validate([
            'password' => '<PASSWORD>',
            'old_password' => '<PASSWORD>',
        ]);
        $new_password = $request->input('password');
        $old_password = $request->input('old_password');
        if(Hash::check($old_password, $password))
        {
            $user->password = <PASSWORD>($new_password);
            $user->save();
            return [
                'type' => 'success',
                'message' => "Password is now updated!",
            ];
        }else{
            return [
                'type' => 'error',
                'message' => "Old password doesnt match!",
            ];
        }
    }
    // Retrieve all users. (The injected $user instance is unused.)
    public function getUserList(User $user)
    {
        return User::get();
    }
    // ----- Access-list management -----
    // Create a new access-list entry; access_key must be unique.
    public function create_new_access(Request $request, AccessList $ACCESS_LIST)
    {
        $validate = $request->validate([
            'access_key' => 'required|unique:access_lists',
            'access_name' => 'required',
            'description' => 'required',
        ]);
        // try creating new access
        $create = $ACCESS_LIST::create([
            'access_key' => $request->input('access_key'),
            'access_name' => $request->input('access_name'),
            'description' => $request->input('description'),
        ]);
        return $this->returnMessage($create, 'success', 'error');
    }
    // All defined access-list entries.
    public function get_all_access(AccessList $ACCESS_LIST)
    {
        return $ACCESS_LIST::get();
    }
    // Delete an access-list entry by id.
    public function delete_access(Request $request, AccessList $ACCESS_LIST)
    {
        $validate = $request->validate([
            'id' => 'required'
        ]);
        $delete = $ACCESS_LIST::where('id', $request->input('id'))
            ->delete();
        return $this->returnMessage($delete, 'success', 'error');
    }
    /**
     * Grant an access entry to a user; rejects duplicates with a 402
     * response (see returnMessageWithThrowError).
     */
    public function add_user_access(Request $request, UserAccess $USER_ACCESS)
    {
        $validate = $request->validate([
            'user_id' => 'required',
            'access_id' => 'required',
        ]);
        $find = $USER_ACCESS::where([
            'user_id' => $request->input('user_id'),
            'access_id' => $request->input('access_id'),
        ])->get();
        if(count($find) <= 0)
        {
            $create = $USER_ACCESS::create([
                'user_id' => $request->input('user_id'),
                'access_id' => $request->input('access_id'),
            ]);
            return $this->returnMessage($create, 'success', 'error')<br
        }else{
            return $this->returnMessageWithThrowError('error_message', 402);
        }
    }
    // All access grants for a user (with the AccessList row eager-loaded).
    public function get_user_access(Request $request, UserAccess $USER_ACCESS)
    {
        $validate = $request->validate([
            'id' => 'required'
        ]);
        $find = $USER_ACCESS::where('user_id', $request->input('id'))
            ->with('access')
            ->get();
        return $find;
    }
    /**
     * Revoke one access grant.
     * NOTE(review): only user_id is validated although access_id is also
     * used in the delete condition — consider requiring it as well.
     */
    public function remove_user_access(Request $request, UserAccess $USER_ACCESS)
    {
        $validate = $request->validate([
            'user_id' => 'required'
        ]);
        $delete = $USER_ACCESS::where([
            'user_id' => $request->input('user_id'),
            'access_id' => $request->input('access_id'),
        ])->delete();
        return $this->returnMessage($delete, 'success', 'error');
    }
    // Map a truthy/falsy result to the given success/error payloads.
    public function returnMessage($bool, $success_message, $error_message)
    {
        if($bool)
        {
            return $success_message;
        }else{
            return $error_message;
        }
    }
    // Build a plain-text error response with an explicit HTTP status code.
    public function returnMessageWithThrowError($error_message, $errorcode)
    {
        return response($error_message, $errorcode)
            ->header('Content-Type', 'text/plain');
    }
}
<file_sep>/app/IncomeAndExpenditure.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for the province-wide income & expenditure summary:
 * one row per year (managed by FinancialProfileController).
 */
class IncomeAndExpenditure extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'income_general_fund',
        'expenditures_obligations',
        'tax_revenue',
        'permit_and_license',
        'service_income',
        'business_income',
        'other_income',
        'grants_and_donation',
        'personal_services',
        'mooe',
        'capitals_outlays',
        'year',
    ];
}
<file_sep>/database/migrations/2018_12_01_020603_create_license_permit_issueds_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateLicensePermitIssuedsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates `license_permit_issueds`: one row per year of driver's
     * license/permit issuance counts, split by professional /
     * non-professional / student and by semester, all defaulting to 0.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('license_permit_issueds', function (Blueprint $table) {
            $table->increments('id');
            $table->integer("pro_new_first_sem")->default(0);
            $table->integer("pro_new_second_sem")->default(0);
            $table->integer("pro_renew_first_sem")->default(0);
            $table->integer("pro_renew_second_sem")->default(0);
            $table->integer("non_pro_new_first_sem")->default(0);
            $table->integer("non_pro_new_second_sem")->default(0);
            $table->integer("non_pro_renew_first_sem")->default(0);
            $table->integer("non_pro_renew_second_sem")->default(0);
            $table->integer("student_first_sem")->default(0);
            $table->integer("student_second_sem")->default(0);
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('license_permit_issueds');
    }
}
<file_sep>/app/Imports/CbmsImport.php
<?php
namespace App\Imports;
use Illuminate\Support\Collection;
use Maatwebsite\Excel\Concerns\ToCollection;
use Maatwebsite\Excel\Concerns\WithMultipleSheets;
use App\Imports\CbmsProvinceImport;
/**
 * Laravel-Excel import entry point for a CBMS workbook. Delegates the
 * 'province' sheet to CbmsProvinceImport, tagging rows with the year.
 */
class CbmsImport implements ToCollection, WithMultipleSheets
{
    /**
     * Year the imported figures belong to (kept as the string the caller
     * supplied; cast to int when handed to the sheet importer).
     *
     * Fix: declared explicitly — the original assigned an undeclared
     * dynamic property, which is deprecated as of PHP 8.2.
     *
     * @var string
     */
    protected $year;

    public function __construct(string $year)
    {
        $this->year = $year;
    }

    /**
     * Unmapped sheets fall through here; the collection is returned as-is.
     */
    public function collection(Collection $collection)
    {
        return $collection;
    }

    /**
     * Map sheet names to their importers.
     */
    public function sheets(): array
    {
        return [
            'province' => new CbmsProvinceImport((int) $this->year),
        ];
    }
}
<file_sep>/app/Http/Controllers/SocialDevelopmentController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\ComparativeLaborForce;
use App\ComparativeEmploymentStatus;
use App\PovertyIncidence;
use App\Municipality;
use App\NumberRateBdimd;
use App\DohLicensedHealthFacility;
use App\NutritionalStatus;
use App\HealthFacility;
use App\HealthPersonnel;
use App\SeniorCitizenAndPersonWithDisability;
use App\CrimeStatisticsAndProtectiveFacility;
use App\EducationFacilities;
use App\EnrollmentInGovernmentAndPrivateSchool;
use App\PerformanceIndicatorInPublicSchool;
use App\HealthMorbidity;
use App\HealthInfantMorbidity;
use App\HealthMortality;
use App\HealthInfantMortality;
use App\DimensionsOfPoverty;
use App\FirePrevention;
use App\SocialWelfareService;
class SocialDevelopmentController extends Controller
{
    /**
     * All comparative labor-force rates grouped by indicator, plus the
     * distinct list of years that have data.
     * Shape: ['data' => [indicator => rows], 'year' => [years]].
     */
    public function getComparativeLaborForce()
    {
        $info = ComparativeLaborForce::get();
        $arr["data"] = [];
        $arr["year"] = [];
        $years = ComparativeLaborForce::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->labor_employment][] = [
                'id' => $value->id,
                'year' => $value->year,
                'rate' => $value->rate,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }
    /**
     * Seed a zero-rate row for each labor indicator for the requested
     * 'year', skipping indicators that already have one.
     */
    public function createComparativeLaborForce(Request $request)
    {
        $labor_employment = ["Labor Force Participation Rate", "Employment Rate","Unemployment Rate","Underemployment Rate"];
        foreach ($labor_employment as $value) {
            $check = ComparativeLaborForce::where([
                'year' => $request->input('year'),
                'labor_employment' => $value,
            ])->first();
            if(!$check)
            {
                $create = ComparativeLaborForce::create([
                    'labor_employment' => $value,
                    'rate' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        return 'true';
    }
    /**
     * Bulk-update rates. 'data' is keyed by indicator, each holding rows
     * with id and rate.
     */
    public function updateComparativeLaborForce(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = ComparativeLaborForce::where('id', $info["id"])
                    ->update(['rate' => $info["rate"]]);
            }
        }
        return 'success';
    }
    /**
     * Employment-status counts grouped by indicator, plus the distinct
     * years with data (same shape as getComparativeLaborForce).
     */
    public function getComparativeEmploymentStatus()
    {
        $info = ComparativeEmploymentStatus::get();
        $arr["data"] = [];
        $arr["year"] = [];
        $years = ComparativeEmploymentStatus::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->labor_employment][] = [
                'id' => $value->id,
                'year' => $value->year,
                'number' => $value->number,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }
    /**
     * Seed a zero-count row for each employment-status indicator for the
     * requested 'year', skipping existing ones.
     */
    public function createComparativeEmploymentStatus(Request $request)
    {
        $labor_employment = ["Total in the labor force", "Total Employed","Total Unemployed"];
        foreach ($labor_employment as $value) {
            $check = ComparativeEmploymentStatus::where([
                'year' => $request->input('year'),
                'labor_employment' => $value,
            ])->first();
            if(!$check)
            {
                $create = ComparativeEmploymentStatus::create([
                    'labor_employment' => $value,
                    'number' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        return 'true';
    }
    /**
     * Bulk-update counts. 'data' is keyed by indicator, each holding rows
     * with id and number.
     */
    public function updateComparativeEmploymentStatus(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = ComparativeEmploymentStatus::where('id', $info["id"])
                    ->update(['number' => $info["number"]]);
            }
        }
        return 'success';
    }
    /**
     * Poverty incidence per municipality grouped by municipality name,
     * plus the distinct years with data.
     */
    public function getPovertyIncidence()
    {
        $info = PovertyIncidence::join('municipalities','municipalities.id','=','poverty_incidences.municipality')
            ->select('municipalities.id as municipality','municipalities.municipality as municipality_name','poverty_incidences.*','municipalities.district')
            ->get();
        $arr["data"] = [];
        $arr["year"] = [];
        $years = PovertyIncidence::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->municipality_name][] = [
                'year' => $value->year,
                'id' => $value->id,
                'poverty_incidence' => $value->poverty_incidence,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }
    /**
     * Seed a zero poverty-incidence row for every municipality for the
     * requested 'year', skipping municipalities that already have one.
     */
    public function createPovertyIncidence(Request $request)
    {
        foreach (Municipality::get() as $value) {
            $check = PovertyIncidence::where([
                'year' => $request->input('year'),
                'municipality' => $value->id,
            ])->first();
            if(!$check)
            {
                $create = PovertyIncidence::create([
                    'municipality' => $value->id,
                    'poverty_incidence' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        return 'true';
    }
    /**
     * Bulk-update incidences. 'data' is keyed by municipality, each
     * holding rows with id and poverty_incidence.
     */
    public function updatePovertyIncidence(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = PovertyIncidence::where('id', $info["id"])
                    ->update(['poverty_incidence' => $info["poverty_incidence"]]);
            }
        }
        return 'success';
    }
    /**
     * Births/deaths/infant-deaths/maternal-deaths numbers and rates
     * grouped by indicator, plus the distinct years with data.
     */
    public function getNumberRateBdimd()
    {
        $info = NumberRateBdimd::get();
        $arr["data"] = [];
        $arr["year"] = [];
        $years = NumberRateBdimd::groupBy('year')->get();
        $year = [];
        foreach ($years as $value) {
            $year[] = $value->year;
        }
        foreach ($info as $value) {
            $arr["data"][$value->indicator][] = [
                'year' => $value->year,
                'id' => $value->id,
                'number' => $value->number,
                'rate' => $value->rate,
            ];
        }
        $arr["year"] = $year;
        return $arr;
    }
    /**
     * Seed a zeroed number/rate row for each vital-statistics indicator
     * for the requested 'year', skipping existing ones.
     */
    public function createNumberRateBdimd(Request $request)
    {
        $labor_employment = ["Births", "Deaths","Infant Deaths","Maternal Deaths"];
        foreach ($labor_employment as $value) {
            $check = NumberRateBdimd::where([
                'indicator' => $value,
                'year' => $request->input('year'),
            ])->first();
            if(!$check)
            {
                $create = NumberRateBdimd::create([
                    'indicator' => $value,
                    'number' => 0,
                    'rate' => 0,
                    'year' => $request->input('year'),
                ]);
            }
        }
        return 'true';
    }
    /**
     * Bulk-update numbers and rates. 'data' is keyed by indicator, each
     * holding rows with id, number and rate.
     */
    public function updateNumberRateBdimd(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            foreach ($value as $info) {
                $update = NumberRateBdimd::where('id', $info["id"])
                    ->update([
                        'number' => $info["number"],
                        'rate' => $info["rate"]
                    ]);
            }
        }
        return 'success';
    }
    // All DOH-licensed health facility records (not year-scoped).
    public function getDohLicensedHealthFacility()
    {
        return DohLicensedHealthFacility::get();
    }
    // Add one facility record from name/address/classification inputs.
    public function createDohLicensedHealthFacility(Request $request)
    {
        $create = DohLicensedHealthFacility::create([
            'name_of_facility' => $request->input('name_of_facility'),
            'address' => $request->input('address'),
            'classification' => $request->input('classification'),
        ]);
        return 'success';
    }
    // Update a facility record identified by 'id'.
    public function updateDohLicensedHealthFacility(Request $request)
    {
        $update = DohLicensedHealthFacility::where('id', $request->input('id'))
            ->update([
                'name_of_facility' => $request->input('name_of_facility'),
                'address' => $request->input('address'),
                'classification' => $request->input('classification'),
            ]);
        return 'success';
    }
    // Delete a facility record by 'id'.
    public function deleteDohLicensedHealthFacility(Request $request)
    {
        $delete = DohLicensedHealthFacility::where('id', $request->input('id'))->delete();
        return "success";
    }
public function getNutritionalStatus(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createNutritionalStatus($year);
}
    /**
     * Seed zeroed nutritional-status rows for each age range for $year
     * (only when the year has none), then return that year's rows.
     */
    public function createNutritionalStatus($year)
    {
        $check = NutritionalStatus::where('year', $year)->get();
        $arr = ["0-5", "6-11","12-23","24-35","36-47","48-59","60-71"];
        if(count($check) <= 0)
        {
            foreach ($arr as $value) {
                $create = NutritionalStatus::create([
                    'age_range' => $value,
                    'normal_boys' => 0,
                    'normal_girls' => 0,
                    'underweight_boys' => 0,
                    'underweight_girls' => 0,
                    'severely_boys' => 0,
                    'severely_girls' => 0,
                    'overweight_boys' => 0,
                    'overweight_girls' => 0,
                    'year' => $year,
                ]);
            }
        }
        return NutritionalStatus::where('year', $year)->get();
    }
    // Bulk-update nutritional-status rows ('data' = rows keyed by column).
    public function updateNutritionalStatus(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = NutritionalStatus::where('id', $value["id"])
                ->update([
                    'age_range' => $value["age_range"],
                    'normal_boys' => $value["normal_boys"],
                    'normal_girls' => $value["normal_girls"],
                    'underweight_boys' => $value["underweight_boys"],
                    'underweight_girls' => $value["underweight_girls"],
                    'severely_boys' => $value["severely_boys"],
                    'severely_girls' => $value["severely_girls"],
                    'overweight_boys' => $value["overweight_boys"],
                    'overweight_girls' => $value["overweight_girls"],
                ]);
        }
        return "success";
    }
public function getHealthFacility(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createHealthFacility($year);
}
    /**
     * Seed zeroed rows for each known facility type for $year (only when
     * the year has none), then return that year's rows.
     */
    public function createHealthFacility($year)
    {
        $check = HealthFacility::where('year', $year)->get();
        $arr = [
            "Private Hospitals",
            "Government Owned Hospitals",
            "Private Clinics",
            "Rural Health Units",
            "City Health Office",
            "Barangay Health Station ",
            "Botika ng Lalawigan",
            "Botika ng Bayan",
        ];
        if(count($check) <= 0)
        {
            foreach ($arr as $value) {
                $create = HealthFacility::create([
                    'facility' => $value,
                    'number' => 0,
                    'year' => $year,
                ]);
            }
        }
        return HealthFacility::where('year', $year)->get();
    }
    // Bulk-update facility counts ('data' = rows with id and number).
    public function updateHealthFacility(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = HealthFacility::where('id', $value["id"])
                ->update([
                    'number' => $value["number"],
                ]);
        }
        return "success";
    }
public function getHealthPersonnel(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createHealthPersonnel($year);
}
    /**
     * Seed zeroed rows for each personnel type for $year (only when the
     * year has none), then return that year's rows.
     */
    public function createHealthPersonnel($year)
    {
        $check = HealthPersonnel::where('year', $year)->get();
        $arr = [
            "Physicians",
            "Nurses",
            "Nutritionist",
            "Medical Technologist",
            "Dentists",
            "Midwives",
            "Sanitary Inspectors",
            "Active BHWs",
        ];
        if(count($check) <= 0)
        {
            foreach ($arr as $value) {
                $create = HealthPersonnel::create([
                    'personnel' => $value,
                    'number' => 0,
                    'year' => $year,
                ]);
            }
        }
        return HealthPersonnel::where('year', $year)->get();
    }
    // Bulk-update personnel counts ('data' = rows with id and number).
    public function updateHealthPersonnel(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = HealthPersonnel::where('id', $value["id"])
                ->update([
                    'number' => $value["number"],
                ]);
        }
        return "success";
    }
public function getSeniorCitizenAndPersonWithDisability(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createSeniorCitizenAndPersonWithDisability($year);
}
    /**
     * Seed zeroed rows for every municipality for $year (only when the
     * year has none), then return that year's rows joined with their
     * municipality records.
     */
    public function createSeniorCitizenAndPersonWithDisability($year)
    {
        $check = SeniorCitizenAndPersonWithDisability::where('year', $year)->get();
        if(count($check) <= 0)
        {
            foreach (Municipality::get() as $value) {
                $create = SeniorCitizenAndPersonWithDisability::create([
                    'municipality' => $value->id,
                    'senior_male' => 0,
                    'senior_female' => 0,
                    'disability_male' => 0,
                    'disability_female' => 0,
                    'year' => $year,
                ]);
            }
        }
        return SeniorCitizenAndPersonWithDisability::where('year', $year)
            ->join('municipalities','municipalities.id','=','senior_citizen_and_person_with_disabilities.municipality')
            ->select('municipalities.id as municipality','municipalities.municipality as municipality_name','senior_citizen_and_person_with_disabilities.*','municipalities.district')
            ->get();
    }
    // Bulk-update senior/PWD counts ('data' = rows keyed by column).
    public function updateSeniorCitizenAndPersonWithDisability(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = SeniorCitizenAndPersonWithDisability::where('id', $value["id"])
                ->update([
                    'senior_male' => $value["senior_male"],
                    'senior_female' => $value["senior_female"],
                    'disability_male' => $value["disability_male"],
                    'disability_female' => $value["disability_female"],
                ]);
        }
        return "success";
    }
public function getCrimeStatisticsAndProtectiveFacility(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$request->input('year');
}
$info = $this->createCrimeStatisticsAndProtectiveFacility($year);
$arr = [];
foreach ($info as $key => $value) {
$arr[$value->group_crime][] = [
"id" => $value->id,
"crime" => $value->crime,
"number" => $value->number,
];
}
return $arr;
}
    /**
     * Seed zeroed rows for every crime indicator (statistics, crimes
     * against persons, crimes against property) for $year when the year
     * has none, then return that year's rows.
     */
    public function createCrimeStatisticsAndProtectiveFacility($year)
    {
        $crime_statistics = [
            "Crime Volume",
            "Crime Solved",
            "Crime Unsolved",
            "Crime Clearance Efficiency (%)",
            "Crime Solution Efficiency (%)",
            "Ave. Monthly Crime Rate (AMCR) (%)"
        ];
        $crime_against_person = [
            "Murder",
            "Homicide",
            "Physical Injuries",
            "Rape"
        ];
        $crime_against_property = [
            "Robbery", "Theft", "Carnapping"
        ];
        $count_crime = CrimeStatisticsAndProtectiveFacility::where('year', $year)->get();
        if(count($count_crime) <= 0)
        {
            foreach ($crime_statistics as $statistic) {
                CrimeStatisticsAndProtectiveFacility::create([
                    'group_crime' => "Crime Statistics",
                    'crime' => $statistic,
                    'number' => 0,
                    'year' => $year,
                ]);
            }
            foreach ($crime_against_person as $person) {
                CrimeStatisticsAndProtectiveFacility::create([
                    'group_crime' => "Crime Against Person",
                    'crime' => $person,
                    'number' => 0,
                    'year' => $year,
                ]);
            }
            foreach ($crime_against_property as $property) {
                CrimeStatisticsAndProtectiveFacility::create([
                    'group_crime' => "Crime Against Property",
                    'crime' => $property,
                    'number' => 0,
                    'year' => $year,
                ]);
            }
        }
        return CrimeStatisticsAndProtectiveFacility::where('year', $year)->get();
    }
public function updateCrimeStatisticsAndProtectiveFacility(Request $request)
{
$data = $request->input('data');
foreach ($data as $crime) {
foreach ($crime as $value) {
CrimeStatisticsAndProtectiveFacility::where('id', $value["id"])
->update([
'number' => $value["number"]
]);
}
}
}
public function getEducationFacilities(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return EducationFacilities::where('year', $year)->get();
}
public function createEducationFacilities(Request $request)
{
$create = EducationFacilities::create([
'facilities' => $request->input('facilities'),
'province' => 0,
'calapan' => 0,
'year' => $request->input('year') ?? date('o'),
]);
return 'success';
}
    // Delete an education-facility row by 'id'.
    public function deleteEducationFacilities(Request $request)
    {
        $delete = EducationFacilities::where('id', $request->input('id'))->delete();
        return 'success';
    }
    // Bulk-update facility counts; missing values fall back to 0.
    public function updateEducationFacilities(Request $request)
    {
        $data = $request->input('data');
        foreach ($data as $value) {
            $update = EducationFacilities::where('id', $value['id'])->update([
                'province' => $value["province"] ?? 0,
                'calapan' => $value["calapan"] ?? 0,
            ]);
        }
        return 'success';
    }
public function getEnrollmentInGovernmentAndPrivateSchool(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return $this->createEnrollmentInGovernmentAndPrivateSchool($year);
}
    /**
     * Seed zeroed enrollment rows for each educational level for $year
     * (only when the year has none), then return that year's rows.
     */
    public function createEnrollmentInGovernmentAndPrivateSchool($year)
    {
        $educational_level = ["Pre-Elementary", "Elementary","Junior High School","Senior High School"];
        $check = EnrollmentInGovernmentAndPrivateSchool::where('year', $year)->get();
        if(count($check) <= 0)
        {
            foreach ($educational_level as $value) {
                $create = EnrollmentInGovernmentAndPrivateSchool::create([
                    'educational_level' => $value,
                    'province_public' => 0,
                    'province_private' => 0,
                    'calapan_public' => 0,
                    'calapan_private' => 0,
                    'calapan_luc_suc' => 0,
                    'year' => $year,
                ]);
            }
        }
        return EnrollmentInGovernmentAndPrivateSchool::where('year', $year)->get();
    }
public function updateEnrollmentInGovernmentAndPrivateSchool(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = EnrollmentInGovernmentAndPrivateSchool::where('id', $value["id"])
->update([
'province_public' => $value["province_public"],
'province_private' => $value["province_private"],
'calapan_public' => $value["calapan_public"],
'calapan_private' => $value["calapan_private"],
'calapan_luc_suc' => $value["calapan_luc_suc"],
]);
}
return 'success';
}
public function getPerformanceIndicatorInPublicSchool(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return $this->createPerformanceIndicatorInPublicSchool($year);
}
public function createPerformanceIndicatorInPublicSchool($year)
{
$arr = [
"Net Enrolment Rate",
"Cohort Survival Rate",
"School Leavers Rate",
"Promotion Rate",
"Completion Rate"
];
$check = PerformanceIndicatorInPublicSchool::where('year', $year)->get();
if(count($check) <= 0)
{
foreach ($arr as $value) {
$create = PerformanceIndicatorInPublicSchool::create([
'indicator' => $value,
'province_elementary' => 0,
'province_secondary' => 0,
'calapan_elementary' => 0,
'calapan_secondary' => 0,
'year' => $year,
]);
}
}
return PerformanceIndicatorInPublicSchool::where('year', $year)->get();
}
public function updatePerformanceIndicatorInPublicSchool(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = PerformanceIndicatorInPublicSchool::where('id', $value["id"])->update([
'province_elementary' => $value["province_elementary"],
'province_secondary' => $value["province_secondary"],
'calapan_elementary' => $value["calapan_elementary"],
'calapan_secondary' => $value["calapan_secondary"],
]);
}
return 'success';
}
public function getHealthMorbidity(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return HealthMorbidity::where('year', $year)->get();
}
public function createHealthMorbidity(Request $request)
{
$create = HealthMorbidity::create([
'cause' => $request->input('cause'),
'year_no' => 0,
'year_rate' => 0,
'five_year_no' => 0,
'five_year_rate' => 0,
'year' => $request->input('year'),
]);
}
public function updateHealthMorbidity(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = HealthMorbidity::where('id', $value["id"])
->update([
'year_no' => $value["year_no"],
'year_rate' => $value["year_rate"],
'five_year_no' => $value["five_year_no"],
'five_year_rate' => $value["five_year_rate"],
]);
}
return 'success';
}
public function deleteHealthMorbidity(Request $request)
{
$delete = HealthMorbidity::where('id', $request->input('id'))->delete();
return 'success';
}
public function getHealthInfantMorbidity(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return HealthInfantMorbidity::where('year', $year)->get();
}
public function createHealthInfantMorbidity(Request $request)
{
$create = HealthInfantMorbidity::create([
'cause' => $request->input('cause'),
'year_no' => 0,
'year_rate' => 0,
'five_year_no' => 0,
'five_year_rate' => 0,
'year' => $request->input('year'),
]);
}
public function updateHealthInfantMorbidity(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = HealthInfantMorbidity::where('id', $value["id"])
->update([
'year_no' => $value["year_no"],
'year_rate' => $value["year_rate"],
'five_year_no' => $value["five_year_no"],
'five_year_rate' => $value["five_year_rate"],
]);
}
return 'success';
}
public function deleteHealthInfantMorbidity(Request $request)
{
$delete = HealthInfantMorbidity::where('id', $request->input('id'))->delete();
return 'success';
}
public function getHealthMortality(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return HealthMortality::where('year', $year)->get();
}
public function createHealthMortality(Request $request)
{
$create = HealthMortality::create([
'cause' => $request->input('cause'),
'year_no' => 0,
'year_rate' => 0,
'five_year_no' => 0,
'five_year_rate' => 0,
'year' => $request->input('year'),
]);
}
public function updateHealthMortality(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = HealthMortality::where('id', $value["id"])
->update([
'year_no' => $value["year_no"],
'year_rate' => $value["year_rate"],
'five_year_no' => $value["five_year_no"],
'five_year_rate' => $value["five_year_rate"],
]);
}
return 'success';
}
public function deleteHealthMortality(Request $request)
{
$delete = HealthInfantMorbidity::where('id', $request->input('id'))->delete();
return 'success';
}
public function getHealthInfantMortality(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year') ?? date('o');
}
return HealthInfantMortality::where('year', $year)->get();
}
public function createHealthInfantMortality(Request $request)
{
$create = HealthInfantMortality::create([
'cause' => $request->input('cause'),
'year_no' => 0,
'year_rate' => 0,
'five_year_no' => 0,
'five_year_rate' => 0,
'year' => $request->input('year'),
]);
}
public function updateHealthInfantMortality(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = HealthInfantMortality::where('id', $value["id"])
->update([
'year_no' => $value["year_no"],
'year_rate' => $value["year_rate"],
'five_year_no' => $value["five_year_no"],
'five_year_rate' => $value["five_year_rate"],
]);
}
return 'success';
}
public function deleteHealthInfantMortality(Request $request)
{
$delete = HealthInfantMortality::where('id', $request->input('id'))->delete();
return 'success';
}
public function getDimensionsOfPoverty(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createDimensionsOfPoverty($year);
}
    /**
     * Stub - intentionally empty. Dimensions-of-poverty rows appear to be
     * produced by the CBMS Excel import rather than edited here
     * (NOTE(review): confirm whether this endpoint was ever meant to be
     * implemented).
     */
    public function updateDimensionsOfPoverty(Request $request)
    {
    }
public function createDimensionsOfPoverty($year)
{
$check = DimensionsOfPoverty::where('year', $year)->get();
return $check;
}
public function getFirePrevention(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createFirePrevention($year);
}
    /**
     * Seed one FirePrevention row per municipality/office for $year (only
     * when the year has no rows yet), then return the year's rows.
     */
    public function createFirePrevention($year)
    {
        $check = FirePrevention::where('year', $year)->get();
        if(count($check) <= 0)
        {
            // Seed list of offices/municipalities. NOTE(review): 'Puerta
            // Galera' looks like a typo for 'Puerto Galera'; fixing it would
            // change stored data, so it is only flagged here.
            $municipality = [
                'Office of the Provincial Fire Marshal',
                'Baco',
                'Bansud',
                'Bongabong',
                'Bulalacao',
                'Calapan City',
                'Gloria',
                'Mansalay',
                'Naujan',
                'Pinamalayan',
                'Pola',
                'Puerta Galera',
                'Roxas',
                'San Teodoro',
                'Socorro',
                'Victoria',
            ];
            foreach ($municipality as $value) {
                FirePrevention::create([
                    'municipality' => $value,
                    'year' => $year,
                ]);
            }
        }
        return FirePrevention::where('year', $year)->get();
    }
public function updateFirePrevention(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = FirePrevention::where('id', $value["id"])
->update([
'no_of_fire_station_bfp' => $value["no_of_fire_station_bfp"],
'no_of_fire_station_lgu' => $value["no_of_fire_station_lgu"],
'no_of_fire_trucks_bfp' => $value["no_of_fire_trucks_bfp"],
'no_of_fire_trucks_lgu' => $value["no_of_fire_trucks_lgu"],
'no_of_fire_trucks_ngo' => $value["no_of_fire_trucks_ngo"],
'no_of_personnel' => $value["no_of_personnel"],
]);
}
return 'success';
}
public function getSocialWelfareService(Request $request)
{
$year = date('o');
if($request->has('year'))
{
$year = $request->input('year');
}
return $this->createSocialWelfareService($year);
}
    /**
     * Seed one SocialWelfareService row per municipality/agency for $year
     * (only when the year has no rows yet), then return the year's rows.
     */
    public function createSocialWelfareService($year)
    {
        $check = SocialWelfareService::where('year', $year)->get();
        if(count($check) <= 0)
        {
            // Seed list of municipalities plus provincial/national agencies.
            // NOTE(review): 'Puerta Galera' looks like a typo for 'Puerto
            // Galera'; fixing it would change stored data, so only flagged.
            $municipality = [
                'Baco',
                'Bansud',
                'Bongabong',
                'Bulalacao',
                'Calapan City',
                'Gloria',
                'Mansalay',
                'Naujan',
                'Pinamalayan',
                'Pola',
                'Puerta Galera',
                'Roxas',
                'San Teodoro',
                'Socorro',
                'Victoria',
                'PGOrM-PSWDO',
                'DSWD',
            ];
            foreach ($municipality as $value) {
                SocialWelfareService::create([
                    'municipality' => $value,
                    'year' => $year,
                ]);
            }
        }
        return SocialWelfareService::where('year', $year)->get();
    }
public function updateSocialWelfareService(Request $request)
{
$data = $request->input('data');
foreach ($data as $value) {
$update = SocialWelfareService::where('id', $value["id"])
->update([
'social_worker_male' => $value["social_worker_male"],
'social_worker_female' => $value["social_worker_female"],
'day_care_male' => $value["day_care_male"],
'day_care_female' => $value["day_care_female"],
'no_of_day_care_centers' => $value["no_of_day_care_centers"],
'children_served_male' => $value["children_served_male"],
'children_served_female' => $value["children_served_female"],
]);
}
return 'success';
}
}
<file_sep>/app/EducationFacilities.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly education facility counts (province-wide vs. Calapan City).
 */
class EducationFacilities extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        "facilities",
        "province",
        "calapan",
        "year",
    ];
}
<file_sep>/app/HealthInfantMorbidity.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Infant morbidity statistics: per-cause counts and rates, single-year
 * and five-year, keyed by year.
 */
class HealthInfantMorbidity extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'cause',
        'year_no',
        'year_rate',
        'five_year_no',
        'five_year_rate',
        'year',
    ];
}
<file_sep>/app/Imports/CbmsProvinceImport.php
<?php
namespace App\Imports;
use Illuminate\Support\Collection;
use Maatwebsite\Excel\Concerns\ToCollection;
use App\DimensionsOfPoverty;
/**
 * Imports the province-level CBMS workbook into DimensionsOfPoverty.
 */
class CbmsProvinceImport implements ToCollection
{
    /**
     * Year stamped onto every imported row. Declared explicitly: the
     * previous version assigned an undeclared (dynamic) property, which
     * is deprecated as of PHP 8.2.
     *
     * @var int
     */
    protected $year;

    /**
     * @param int $year Year the imported workbook covers.
     */
    public function __construct(int $year)
    {
        $this->year = $year;
    }

    /**
     * Persist sheet rows 9..59 as DimensionsOfPoverty records.
     *
     * Column layout (0-based): indicator, household magnitude/proportion,
     * population magnitude (total/male/female), population proportion
     * (total/male/female). NOTE(review): the fixed 9..59 range assumes the
     * CBMS export layout is stable - confirm against a sample file.
     *
     * @param Collection $collection
     */
    public function collection(Collection $collection)
    {
        for ($i = 9; $i <= 59; $i++) {
            DimensionsOfPoverty::create([
                'indicator' => $collection[$i][0],
                'household_magnitude' => $collection[$i][1],
                'household_proportion' => $collection[$i][2],
                'population_magnitude_total' => $collection[$i][3],
                'population_magnitude_male' => $collection[$i][4],
                'population_magnitude_female' => $collection[$i][5],
                'population_proportion_total' => $collection[$i][6],
                'population_proportion_male' => $collection[$i][7],
                'population_proportion_female' => $collection[$i][8],
                'year' => $this->year,
            ]);
        }
        return $collection;
    }
}
<file_sep>/database/migrations/2018_10_08_075057_create_established_marine_protected_areas_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for the established_marine_protected_areas table: one MPA per
 * row with its municipality, location, establishment year and area.
 */
class CreateEstablishedMarineProtectedAreasTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('established_marine_protected_areas', function (Blueprint $table) {
            $table->increments('id');
            // Foreign key to municipalities (stored as a plain integer).
            $table->integer('municipality');
            $table->string('name_of_mpa');
            $table->string('location');
            // NOTE(review): 'year_stablished' looks like a typo for
            // 'year_established'; renaming needs a follow-up migration plus
            // code changes, so it is only flagged here.
            $table->year('year_stablished');
            // Estimated area, defaulting to 0 (units not recorded here).
            $table->decimal('estimated_area', 10, 2)->default(0);
            // Reporting year this row belongs to.
            $table->year('year');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('established_marine_protected_areas');
    }
}
<file_sep>/app/Communication.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Per-municipality telecom coverage figures (Smart / Globe), keyed by year.
 */
class Communication extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'municipality',
        'smart_communication',
        'globe_telecom',
        'year',
    ];
}
<file_sep>/app/Project.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\SoftDeletes;
/**
 * An infrastructure project under monitoring (soft-deletable).
 */
class Project extends Model
{
    use SoftDeletes;

    // Columns cast to dates (soft-delete timestamp).
    protected $dates = ['deleted_at'];

    // Mass-assignable columns.
    protected $fillable = [
        'title',
        'control_number',
        'mode_of_implementation',
        'loc_barangay',
        'loc_municipality',
        'implementing_agency',
        'other_implementing_agency',
        'municipality',
        'fund_source',
        'fund_source_year',
        // Added: ProjectManagementController passes this on create/update,
        // but it was not mass-assignable, so the value was silently dropped.
        'specific_fund_source',
        'sector',
        'project_cost',
        'status',
        'monitored_by',
        'monitoring_status',
        'date_monitored',
        'link',
        'contractor'
    ];

    /** Implementing agency lookup row. */
    public function rel_implementing_agency()
    {
        return $this->hasOne('App\ImplementorAgencyList','id', 'implementing_agency');
    }

    /** Barangay the project is located in. */
    public function location_barangay()
    {
        return $this->hasOne('App\Barangay', 'id', 'loc_barangay');
    }

    /** Municipality the project is located in. */
    public function location_municipality()
    {
        return $this->hasOne('App\Municipality', 'id', 'loc_municipality');
    }

    /** Municipality lookup for the 'municipality' column. */
    public function rel_municipality()
    {
        return $this->hasOne('App\Municipality', 'id','municipality');
    }

    /** Monitoring team lookup row. */
    public function rel_monitor_by()
    {
        return $this->hasOne('App\MonitoredByList', 'id', 'monitored_by');
    }

    /** Accessor: format the cost with thousands separators and 2 decimals. */
    public function getProjectCostAttribute($value)
    {
        return number_format($value, 2);
    }

    /** Accessor: human-readable monitoring date, or '-' when unset. */
    public function getDateMonitoredAttribute($value)
    {
        if($value == "")
        {
            return "-";
        }
        // 'Y' = calendar year; the previous 'o' (ISO-8601 week-numbering
        // year) renders the wrong year for dates near Dec 31 / Jan 1.
        return date('F d, Y', strtotime($value));
    }
}
<file_sep>/app/SocialWelfareService.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Per-municipality social welfare staffing and day-care figures, by year.
 */
class SocialWelfareService extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'municipality',
        'social_worker_male',
        'social_worker_female',
        'day_care_male',
        'day_care_female',
        'no_of_day_care_centers',
        'children_served_male',
        'children_served_female',
        'year',
    ];
}
<file_sep>/app/Municipality.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Municipality lookup: name, congressional district and PSGC code.
 */
class Municipality extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'municipality',
        'district',
        'psgc',
    ];
    // Municipalities that have a watershed.
    // NOTE(review): the ids are hard-coded; confirm they still match the
    // seeded municipality rows.
    public static function with_watershed(){
        $district = [1,2,3,4,5,7,9,10,11,12,13,14,15];
        return Municipality::whereIn('id', $district)->get();
    }
}
<file_sep>/app/ActualProjectedInfo.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Configuration row holding the reference years and growth rates used for
 * actual-vs-projected population figures.
 */
class ActualProjectedInfo extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'old_year',
        'actual_year',
        'projected_year',
        'growth_rate_1',
        'growth_rate_2',
    ];
}
<file_sep>/database/migrations/2018_09_18_014759_create_projects_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for the projects table: monitored infrastructure projects with
 * location, funding, cost and monitoring metadata (soft-deletable).
 */
class CreateProjectsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('projects', function (Blueprint $table) {
            $table->increments('id');
            $table->string('title');
            $table->string('control_number');
            // 0 = by contract, 1 = by administration (per controller usage).
            $table->boolean('mode_of_implementation');
            $table->string('contractor')->nullable();
            // Location foreign keys (stored as plain integers).
            $table->integer('loc_barangay')->nullable();
            $table->integer('loc_municipality')->nullable();
            $table->integer('implementing_agency');
            $table->string('other_implementing_agency')->nullable();
            $table->integer('municipality')->nullable();
            // Funding: source id, year and optional free-text detail.
            $table->integer('fund_source');
            $table->year('fund_source_year');
            $table->string('specific_fund_source')->nullable();
            $table->integer('sector');
            $table->decimal('project_cost', 20, 4);
            $table->boolean('status')->default(0);
            // Monitoring metadata; 5 appears to be the default team id
            // (NOTE(review): confirm against MonitoredByList seed data).
            $table->integer('monitored_by')->default(5);
            $table->date('date_monitored')->nullable();
            $table->boolean('monitoring_status')->default(0);
            $table->string('link')->nullable();
            $table->softDeletes();
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('projects');
    }
}
<file_sep>/resources/assets/js/vue/components/index.js
// Global Vue component registration: layout, chart, form and file helpers.
require('./facts_and_figure')
import Vue from 'vue';
import navbar from './layouts/nav.vue'
import sidenav from './layouts/sidenav.vue'
import modal from './other/modal.vue'
import dtable from './other/table.vue'
import currency_input from './other/currency_input.vue'
import currency_label from './other/currency_label.vue'
// Components
//Chart
import PieChart from './chart/Pie.vue'
import FileUpload from './file/fileupload.vue'
// Register every component globally so templates can use them without
// importing each one locally.
Vue.component('fileupload', FileUpload);
Vue.component('navbar', navbar);
Vue.component('sidenav', sidenav);
Vue.component('modal', modal);
Vue.component('v-datatable', dtable);
Vue.component('currency_input', currency_input);
Vue.component('currency_label', currency_label);
Vue.component('v-piechart', PieChart);
import sum_of_object from './other/sum_of_object.vue'
Vue.component('sum_of_object', sum_of_object);
<file_sep>/resources/assets/js/vue/index.js
// Application entry point: installs plugins (pagination, date picker,
// select, toastr, moment, sweetalert) and mounts the root Vue instance.
import Vue from 'vue';
import App from './view/App.vue'
import store from './store'
import router from './routes'
import components from './components'
import VuePaginate from 'vue-paginate'
Vue.use(VuePaginate)
import DatePicker from 'vue2-datepicker'
// Configure the date picker's wire format (ISO-8601 without timezone).
DatePicker.fecha.format(new Date(), 'YYYY-MM-DDTHH:mm:ss')
Vue.use(DatePicker);
import vSelect from 'vue-select'
Vue.component('v-select', vSelect)
import VueToastr from '@deveodk/vue-toastr'
import '@deveodk/vue-toastr/dist/@deveodk/vue-toastr.css'
Vue.use(require('vue-moment'));
import Font from './font-awesome'
import VueSweetalert2 from 'vue-sweetalert2';
// Style sweetalert buttons with Bootstrap classes instead of its defaults.
var sweetalert_option = {
    confirmButtonClass: 'btn btn-success ml-1',
    cancelButtonClass: 'btn btn-danger ml-1',
    buttonsStyling: false,
};
Vue.use(VueSweetalert2, sweetalert_option);
Vue.use(VueToastr, {
    defaultPosition: 'toast-bottom-left',
    defaultType: 'info',
    defaultTimeout: 5000
});
// Mount the root component with router and store attached.
const app = new Vue({
    el: '#app',
    template: '<app></app>',
    components: {App},
    router,
    store
})<file_sep>/app/GeneralInformation.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Yearly general profile of the province (location, land area, climate,
 * administrative subdivisions, legal basis).
 */
class GeneralInformation extends Model
{
    // Mass-assignable columns.
    protected $fillable = [
        'location',
        'topography',
        'land_area',
        'boundary',
        'climate',
        'capital',
        'date_of_approval',
        'legal_basis',
        'no_of_component_city',
        'no_of_municipality',
        'no_of_congressional_district',
        'no_of_barangay',
        'year',
    ];
}
<file_sep>/app/Http/Controllers/ProjectManagementController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Project;
use App\ImplementorAgencyList;
use App\MonitoredByList;
use DB;
use App\Http\Controllers\FileController;
use PDF;
use Storage;
/**
 * Project monitoring endpoints: CRUD, monitoring updates, printable
 * reports and file attachments.
 */
class ProjectManagementController extends Controller
{
    /** List implementing agencies (dropdown data). */
    public function getImplementorAgencyList()
    {
        return ImplementorAgencyList::get();
    }

    /**
     * Validate and create a project. Returns 'success' or 'error'.
     */
    public function create(Request $request, Project $project)
    {
        $validate = $request->validate([
            'title' => 'required',
            'control_number' => 'required|unique:projects',
            'implementing_agency' => 'required',
            'sector' => 'required',
            'project_cost' => 'required|numeric',
            'loc_municipality' => 'required',
            'fund_source' => 'required',
            'mode_of_implementation' => 'required',
        ]);
        $create = $project::create([
            'title' => $request->input('title'),
            'control_number' => $request->input('control_number'),
            'mode_of_implementation' => $request->input('mode_of_implementation'),
            'contractor' => $request->input('contractor'),
            'loc_barangay' => $request->input('loc_barangay'),
            'loc_municipality' => $request->input('loc_municipality'),
            'implementing_agency' => $request->input('implementing_agency'),
            'other_implementing_agency' => $request->input('other_implementing_agency'),
            'municipality' => $request->input('municipality'),
            'fund_source' => $request->input('fund_source'),
            'fund_source_year' => $request->input('fund_source_year'),
            'specific_fund_source' => $request->input('specific_fund_source'),
            'sector' => $request->input('sector'),
            'project_cost' => $request->input('project_cost'),
            'status' => $request->input('status'),
        ]);
        if ($create) {
            return 'success';
        } else {
            return 'error';
        }
    }

    /**
     * Validate and update an existing project. Returns 'success' or 'error'.
     */
    public function update(Request $request, Project $project)
    {
        $validate = $request->validate([
            'id' => 'required',
            'title' => 'required',
            'implementing_agency' => 'required',
            'sector' => 'required',
            'project_cost' => 'required|numeric',
            'loc_municipality' => 'required',
            'fund_source' => 'required',
            'mode_of_implementation' => 'required',
        ]);
        $update = $project::where('id', $request->input('id'))
            ->update([
                'title' => $request->input('title'),
                'mode_of_implementation' => $request->input('mode_of_implementation'),
                'contractor' => $request->input('contractor'),
                'loc_barangay' => $request->input('loc_barangay'),
                'loc_municipality' => $request->input('loc_municipality'),
                'implementing_agency' => $request->input('implementing_agency'),
                'other_implementing_agency' => $request->input('other_implementing_agency'),
                'municipality' => $request->input('municipality'),
                'fund_source' => $request->input('fund_source'),
                'fund_source_year' => $request->input('fund_source_year'),
                'specific_fund_source' => $request->input('specific_fund_source'),
                'sector' => $request->input('sector'),
                'project_cost' => $request->input('project_cost'),
                'status' => $request->input('status'),
            ]);
        if ($update) {
            return 'success';
        } else {
            return 'error';
        }
    }

    /** Soft-delete a project by id. */
    public function delete(Request $request, Project $project)
    {
        $validate = $request->validate([
            'id' => 'required'
        ]);
        $delete = $project::where('id', $request->input('id'))->delete();
        if ($delete) {
            return 'success';
        } else {
            return 'error';
        }
    }

    /** All projects, newest first, with the monitoring team name joined in. */
    public function get(Project $project)
    {
        return $project::join('monitored_by_lists', 'monitored_by_lists.id','=','projects.monitored_by')->select('projects.*','monitored_by_lists.monitor as monitored_by')->orderBy('id', 'DESC')->get();
    }

    /** Fetch a single project by id. */
    public function find(Request $request, Project $project)
    {
        $validate = $request->validate([
            'id' => 'required'
        ]);
        $info = $project::where('id', $request->input('id'))
            ->first();
        return $info;
    }

    /** List monitoring teams (dropdown data). */
    public function getMonitoringList()
    {
        return MonitoredByList::get();
    }

    /** Record who monitored a project and when; marks it as monitored. */
    public function updateMonitor(Request $request)
    {
        $request->validate([
            'id' => 'required',
            'monitored_by' => 'required',
            'date_monitored' => 'required',
        ]);
        $update = Project::where('id', $request->input('id'))
            ->update([
                'monitored_by' => $request->input('monitored_by'),
                'date_monitored' => $request->input('date_monitored'),
                'monitoring_status' => 1,
            ]);
        return 'success';
    }

    /** Attach/replace the external link for a project. */
    public function updateLink(Request $request)
    {
        $request->validate([
            'id' => 'required'
        ]);
        $update = Project::where('id', $request->input('id'))
            ->update(['link' => $request->input('link')]);
        return 'success';
    }

    /**
     * Build the printable monitoring report.
     *
     * Filters monitored projects by report type (0 = month, 1 = quarter,
     * 2 = year, 'all'), municipality, monitoring team and fund source.
     */
    public function print_report(Request $request)
    {
        // Report generation can be heavy on large datasets.
        ini_set('memory_limit', '2048M');
        ini_set('max_execution_time', 600);
        // Columns the user ticked for inclusion.
        $report_fields = [];
        if($request->input('control_number') == "control_number")
        {
            $report_fields[] = "control_number";
        }
        if($request->input('title') == "title")
        {
            $report_fields[] = "title";
        }
        if($request->input('location') == "location")
        {
            $report_fields[] = "location";
        }
        if($request->input('project_cost') == "project_cost")
        {
            $report_fields[] = "project_cost";
        }
        if($request->input('fund_source') == "fund_source")
        {
            $report_fields[] = "fund_source";
        }
        if($request->input('mode_of_implementation') == "mode_of_implementation")
        {
            $report_fields[] = "mode_of_implementation";
        }
        if($request->input('contractor') == "contractor")
        {
            $report_fields[] = "contractor";
        }
        if($request->input('date_monitored') == "date_monitored")
        {
            $report_fields[] = "date_monitored";
        }
        if($request->input('field_month') == "month")
        {
            $report_fields[] = "field_month";
        }
        if($request->input('field_year') == "year")
        {
            $report_fields[] = "field_year";
        }
        if($request->input('monitored_by') == "monitored_by")
        {
            $report_fields[] = "monitored_by";
        }
        // Month numbers covered by each quarter (index 0 = whole year).
        $quarter[0] = [1,2,3,4,5,6,7,8,9,10,11,12];
        $quarter[1] = [1,2,3];
        $quarter[2] = [4,5,6];
        $quarter[3] = [7,8,9];
        $quarter[4] = [10,11,12];
        $quarter_number = 0;
        $month_selected = 0;
        $project = [];
        $type = 0;
        $monitoring_team = $request->input('monitoring_team');
        $year = 0;
        $selected_quarter = [];
        switch ($request->input('report_type')) {
            case "0":
                $month_selected = $request->input('month');
                $type = 1;
                break;
            case "1":
                $quarter_number = $request->input('quarter');
                $year = $request->input('quarter_year');
                $type = 3;
                $selected_quarter = $quarter[$request->input('quarter')];
                break;
            case "2":
                $year = $request->input('year');
                $type = 2;
                break;
        }
        $project = Project::where(function($query) use ($request, $monitoring_team, $selected_quarter, $year){
            switch ($request->input('report_type')) {
                case "0":
                    $month_selected = $request->input('month');
                    $month = date("n", strtotime($month_selected));
                    // 'Y' = calendar year; the previous 'o' (ISO-8601
                    // week-numbering year) is wrong around the new year.
                    $year = date("Y", strtotime($month_selected));
                    $query->whereMonth('date_monitored', $month);
                    $query->whereYear('date_monitored', $year);
                    break;
                case "1":
                    $query->whereIn(DB::raw('MONTH(date_monitored)'), $selected_quarter);
                    $query->whereYear('date_monitored', $year);
                    break;
                case "2":
                    $query->whereYear('date_monitored', $request->input('year'));
                    break;
                case "all":
                    break;
            }
            if($request->input('municipality') != "")
            {
                $query->where('loc_municipality', $request->input('municipality'));
            }
            if($monitoring_team != "")
            {
                $query->where('monitored_by', $monitoring_team);
            }
            if($request->input('fund_sources') != "")
            {
                // Bug fix: the column is 'fund_source' (see the projects
                // migration); filtering on 'fund_sources' raised an
                // unknown-column SQL error whenever this filter was used.
                $query->where('fund_source', $request->input('fund_sources'));
            }
            if($request->input('fund_source_year') != "")
            {
                $query->where('fund_source_year', $request->input('fund_source_year'));
            }
        })
        ->where('monitoring_status', 1)
        ->with('rel_implementing_agency', 'rel_municipality', 'rel_monitor_by','location_barangay', 'location_municipality')
        ->get();
        $arrs = [];
        // TODO(review): the view is returned directly, so the PDF branch
        // below is unreachable; kept as-is to preserve current behaviour.
        return view('print.project', compact('arrs','report_fields','type','quarter_number','month_selected','year', 'project'));
        $pdf = PDF::loadView('print.project', compact('arrs','report_fields','type','quarter_number','month_selected','year','project'));
        $pdf->setPaper($request->input('paper_size'), $request->input('page_orientation'));
        return $pdf->download('project_' . date('dmyGis') .'.pdf');
    }

    /**
     * Map a 1-based fund source id to its display name; id 7 ("Others")
     * gets the free-text detail appended.
     */
    public static function fund_source($fund_source, $others)
    {
        // Incoming ids are 1-based; the array below is 0-based.
        $fund_source -= 1;
        $arr = [
            "20% Development Fund",
            "General Fund",
            "Special Education Fund",
            "Trust Fund",
            "Performance Challenge Fund",
            "Local Government Support Fund",
            "Others",
        ];
        if($fund_source == 6)
        {
            return $arr[$fund_source] . ' ' . $others;
        }
        return $arr[$fund_source];
    }

    /**
     * Store uploaded attachments under public/{control_number}/original name.
     */
    public function uploadAttachedFiles(Request $request)
    {
        ini_set('post_max_size', '128M');
        ini_set('memory_limit', '128M');
        ini_set('upload_max_filesize', '128M');
        $control_number = $request->input('control_number');
        $files = $request->file('files');
        // Bug fix: the previous mkdir('/') tried to create the filesystem
        // root (always fails with a warning); putFileAs() creates the
        // target directory itself, so no explicit mkdir is needed.
        foreach ($files as $value) {
            Storage::disk('public')->putFileAs('/'. $control_number, $value, $value->getClientOriginalName());
        }
        return "success";
    }

    /** List attached files (name, extension, url) for a control number. */
    public function getAttachedFile(Request $request)
    {
        $control_number = $request->input('control_number');
        $files_list = Storage::disk('public')->allFiles("/".$control_number . '/');
        $arr = [];
        foreach ($files_list as $list) {
            $link = Storage::disk('public')->url($list);
            $arr[] = $this->fileInfo(pathinfo($link), Storage::url($list));
        }
        return $arr;
    }

    /** Fetch one project with its location relations eager-loaded. */
    public function getInfo(Request $request)
    {
        $id = $request->input('id');
        return Project::with('location_barangay', 'location_municipality')->where('id', $id)->first();
    }

    /** Shape a pathinfo() result plus URL into the attachment payload. */
    public function fileInfo($file, $link)
    {
        $arr = [];
        $arr["filename"] = $file["filename"];
        $arr['extension'] = $file["extension"];
        $arr["url"] = $link;
        return $arr;
    }

    /** Distinct fund-source years present in projects (filter options). */
    public function getFundSourceYear()
    {
        return Project::whereNotNull('fund_source_year')
            ->groupBy('fund_source_year')
            ->select('fund_source_year')
            ->get();
    }

    /** Delete an attached file given its public /storage/ URL. */
    public function removeFile(Request $request)
    {
        $url = $request->input('url');
        $url = str_replace("/storage/", "", $url);
        Storage::disk('public')->delete($url);
        return $url;
    }
}
|
d530bd34d4bdb92566694359422ece5fdfddedaa
|
[
"JavaScript",
"PHP"
] | 97 |
PHP
|
zhoneyboo/project_management
|
1fde40d75d89f22119d69737161f96a922d9560e
|
a4c241245ef05c7ebbf1f14d6ae78d286cc7a273
|
refs/heads/main
|
<file_sep># Time Complexity : O(N)
# Space Complexity : O(N)
# Did this code successfully run on Leetcode : YES
# Any problem you faced while coding this : NO
"""
# Definition for Employee.
class Employee:
def __init__(self, id: int, importance: int, subordinates: List[int]):
self.id = id
self.importance = importance
self.subordinates = subordinates
"""
#BFS
class Solution:
    def getImportance(self, employees: List['Employee'], id: int) -> int:
        """Sum the importance of employee `id` and all transitive subordinates (BFS)."""
        if not employees:
            return 0
        # Index employees by id for O(1) lookup while traversing.
        by_id = {emp.id: emp for emp in employees}
        total = 0
        pending = deque([id])
        while pending:
            emp = by_id[pending.popleft()]
            total += emp.importance
            # Queue the direct reports for later processing.
            pending.extend(emp.subordinates)
        return total
#DFS
class Solution:
    def __init__(self):
        # Running total for the current getImportance() call.
        self.importance = 0

    def getImportance(self, employees: List['Employee'], id: int) -> int:
        """Sum the importance of employee `id` and all transitive subordinates (DFS).

        Bug fix: the accumulator is reset on entry, so repeated calls on the
        same Solution instance no longer accumulate totals from earlier calls.
        """
        if len(employees) == 0:
            return 0
        self.importance = 0
        hashmap = {e.id: e for e in employees}  # id -> employee object
        self.dfs(id, hashmap)
        return self.importance

    def dfs(self, id, hashmap):
        # Add this employee's importance, then recurse into direct reports.
        emp = hashmap[id]
        self.importance += emp.importance
        for sub in emp.subordinates:
            self.dfs(sub, hashmap)
<file_sep># Time Complexity : O(M*N)
# Space Complexity : O(M*N)
# Did this code successfully run on Leetcode : YES
# Any problem you faced while coding this : NO
class Solution:
    def orangesRotting(self, grid: List[List[int]]) -> int:
        """Return minutes until every fresh orange (1) rots via adjacency to
        a rotten one (2), or -1 if some fresh orange is unreachable.

        Bug fix: the previous version returned `time - 1` from inside the
        BFS loop, which under-counts by one minute (e.g. the canonical
        [[2,1,1],[1,1,0],[0,1,1]] case returned 3 instead of 4; the in-loop
        early return needs `time`, since `time - 1` belongs to the variant
        that checks after the loop has drained an extra level).
        """
        rows, cols = len(grid), len(grid[0])
        q = deque()
        fresh = 0
        for i in range(rows):
            for j in range(cols):
                if grid[i][j] == 1:
                    fresh += 1
                elif grid[i][j] == 2:
                    q.append((i, j))
        if fresh == 0:
            return 0
        minutes = 0
        dirs = ((1, 0), (0, 1), (-1, 0), (0, -1))
        # Level-order BFS: each level of the queue is one minute of rotting.
        while q and fresh:
            minutes += 1
            for _ in range(len(q)):
                row, col = q.popleft()
                for dr, dc in dirs:
                    r, c = row + dr, col + dc
                    if 0 <= r < rows and 0 <= c < cols and grid[r][c] == 1:
                        grid[r][c] = 2
                        fresh -= 1
                        q.append((r, c))
        return minutes if fresh == 0 else -1
|
cd16b7090724970a7f3d81499976b5beafa015c3
|
[
"Python"
] | 2 |
Python
|
Onurene/BFS-2.1
|
8268584ce3dbb2bd384294e2207989e5db436018
|
2ec30cf7ef04e6e77014265ad5ee9a10b0c66b6c
|
refs/heads/master
|
<file_sep>#include <SoftwareSerial.h>
#include <SPI.h>
#include <Ethernet.h>
#include <PubSubClient.h>
#include "BluetoothLE.h"
#define BEACONTOPIC "BEACON01"
#define SALATOPIC "UFC/QUIXADA/BLOCO01/SALA01"
int lampada = 8;  // digital pin driving the lamp/LED
// Update these with values appropriate for your network.
byte mac[] = {
  0xDE, 0xED, 0xBA, 0xFE, 0xFE, 0xED };
IPAddress ip(192, 168, 1, 105);      // this board's static IP
IPAddress server(192, 168, 1, 100);  // MQTT broker address
BluetoothLE ble(5,6);  // BLE module on pins 5/6 - confirm RX/TX wiring
EthernetClient ethClient;
PubSubClient client(ethClient);  // MQTT client over the Ethernet socket
// MQTT message handler: updates the beacon UUID on BEACONTOPIC messages and
// switches the lamp on SALATOPIC messages ("1" = on, "0" = off).
void callback(char* topic, byte* payload, unsigned int length) {
  String menssagem = "";
  String topico = "";
  // Rebuild the topic as a String so it can be compared with == below.
  for (int i=0;i<(int)strlen(topic);i++) {
    topico += topic[i];
  }
  // Copy exactly `length` bytes: the payload is not NUL-terminated.
  for (int i=0;i<length;i++) {
    menssagem += (char)payload[i];
  }
  Serial.println(menssagem);
  if(topico == BEACONTOPIC){
    // First 8 characters of the message become the beacon UUID prefix.
    ble.setUUID(0, menssagem.substring(0, 8));
  }
  if(topico == SALATOPIC){
    if(menssagem == "1"){
      digitalWrite(lampada, HIGH);
      // NOTE(review): 3 s blocking delay stalls MQTT/BLE processing -
      // confirm it is intentional (debounce?).
      delay(3000);
      Serial.println("LIGOU O LED");
    }
    if(menssagem == "0"){
      digitalWrite(lampada, LOW);
      delay(3000);
      Serial.println("DESLIGOU O LED");
    }
  }
  Serial.println();
}
// Block until the MQTT connection is (re)established, retrying every 5 s,
// and re-subscribe to both topics after each successful connect.
void reconnect(){
  // Loop until we are reconnected
  while (!client.connected()) {
    Serial.print("Tentando uma conexao MQTT...");
    // Attempt to connect
    if(client.connect("arduinoClient")){
      Serial.println("Conectado!");
      // ... and subscribe again
      client.subscribe(BEACONTOPIC);
      client.subscribe(SALATOPIC);
    }
    else {
      Serial.print("Erro ao se conectar, rc=");
      Serial.print(client.state());
      Serial.println("Tentando conectar novamente em 5s...");
      // Wait 5 seconds before retrying.
      delay(5000);
    }
  }
}
void setup()
{
Serial.begin(9600);
pinMode(led, OUTPUT);
Serial.println("Iniciando Bluetooth...");
ble.begin(9600);
Serial.println("Bluetooth iniciado!");
delay(1000);
Serial.println("Iniciando Ethernet...");
client.setServer(server, 1883);
client.setCallback(callback);
Ethernet.begin(mac, ip);
Serial.println("Ethernet iniciado!");
// Permitir que o hardware se classifique
delay(1500);
}
void loop()
{
if (!client.connected()) {
reconnect();
}
ble.loop();
if(ble.hasMessage()) {
Serial.print("BLE MENSAGEM: ");
Serial.print(ble.getMessage());
Serial.println();
}
client.loop();
}
|
bd79cbab95adfbd046ebe4b5c2ec7e6094cb6466
|
[
"C++"
] | 1 |
C++
|
fhugoduarte/QBeaconArduino
|
ba4b64b1c8b4f9215747f9d268779372cc1b3620
|
fbe44f840f7c229ffe2824ce4b0cc0eb1952143c
|
refs/heads/master
|
<file_sep><?php
class Weekday
{
// function addDayAndMonth($day, $month)
// {
// $day_and_month = $day + $month;
// if ($day_and_month > 6) {
// $for_step_3 = $day_and_month % 7;
// } else {
// $for_step_3 = $day_and_month;
// }
// return $for_step_3;
// }
function calculateDay($month, $day, $year)
{
$day_of_week = date("l", mktime(0,0,0, $month, $day, $year));
return $day_of_week;
}
}
?>
<file_sep><?php
require_once "src/Weekday.php";
class WeekdayTest extends PHPUnit_Framework_TestCase
{
// function test_addDayAndMonth()
// {
// $test_weekday = new Weekday;
// $input_day = 28;
// $input_month = 6;
//
// $result = $test_weekday->addDayAndMonth($input_day, $input_month);
//
// $this->assertEquals(6, $result);
// }
function test_calculateDay()
{
$test_weekday = new Weekday;
$input_month = 7;
$input_day = 4;
$input_year = 1776;
$result = $test_weekday->calculateDay($input_month, $input_day, $input_year);
$this->assertEquals("Thursday", $result);
}
}
?>
|
2d4f77317dd0febd6c08c1b8de4b9b50a7721534
|
[
"PHP"
] | 2 |
PHP
|
mollieboots/php-weekday-finder
|
d0edfbd3a09a34289d4a87276ecde7273bcb9621
|
b94dd176ea0ea2a77c504f04cd6794e4c93bd234
|
refs/heads/main
|
<repo_name>stevenyuan49/anki-player.github.io-<file_sep>/extension/src/services/ControlsContainer.js
export default class ControlsContainer {
constructor(video) {
this.video = video;
this.elements = [];
}
show() {
for (const e of this.elements) {
e.classList.remove('asbplayer-hide');
}
}
hide() {
this._garbageCollectElements();
this._findElements();
for (const e of this.elements) {
e.classList.add('asbplayer-hide');
}
return new Promise((resolve, reject) => {
setTimeout(() => resolve(), 0);
});
}
_garbageCollectElements() {
this.elements = this.elements.filter(e => document.body.contains(e));
}
_findElements() {
for (const p of this._samplePoints()) {
for (const element of this._path(document.elementFromPoint(p.x, p.y))) {
if (element && !this._contains(this.elements, element)) {
this.elements.push(element);
}
}
}
}
* _samplePoints() {
const rect = this.video.getBoundingClientRect();
const stepX = rect.width / 25;
const stepY = rect.height / 25;
for (let x = rect.x; x <= rect.width + rect.x; x += stepX) {
for (let y = rect.y; y <= rect.height + rect.y; y += stepY) {
yield {x: x, y: y};
}
}
}
* _path(element) {
if (!element || element.contains(this.video)) {
return;
}
let current = element;
yield current;
while (true) {
const parent = current.parentElement;
if (!parent || parent.contains(this.video)) {
break;
}
current = parent;
yield current;
}
}
_contains(elements, element) {
for (const e of elements) {
if (e.isSameNode(element)) {
return true;
}
}
return false;
}
}<file_sep>/extension/src/background.js
import TabRegistry from './services/TabRegistry';
import Settings from './services/Settings';
import AudioRecorder from './services/AudioRecorder';
import ImageCapturer from './services/ImageCapturer';
import VideoHeartbeatHandler from './handlers/video/VideoHeartbeatHandler';
import RecordMediaHandler from './handlers/video/RecordMediaHandler';
import RerecordMediaHandler from './handlers/video/RerecordMediaHandler';
import StartRecordingMediaHandler from './handlers/video/StartRecordingMediaHandler';
import StopRecordingMediaHandler from './handlers/video/StopRecordingMediaHandler';
import ToggleSubtitlesHandler from './handlers/video/ToggleSubtitlesHandler';
import SyncHandler from './handlers/video/SyncHandler';
import HttpPostHandler from './handlers/video/HttpPostHandler';
import VideoToAsbplayerCommandForwardingHandler from './handlers/video/VideoToAsbplayerCommandForwardingHandler';
import AsbplayerToVideoCommandForwardingHandler from './handlers/asbplayer/AsbplayerToVideoCommandForwardingHandler';
import AsbplayerV2ToVideoCommandForwardingHandler from './handlers/asbplayerv2/AsbplayerV2ToVideoCommandForwardingHandler';
import AsbplayerHeartbeatHandler from './handlers/asbplayerv2/AsbplayerHeartbeatHandler';
import RefreshSettingsHandler from './handlers/popup/RefreshSettingsHandler';
const settings = new Settings();
const tabRegistry = new TabRegistry(settings);
const audioRecorder = new AudioRecorder();
const imageCapturer = new ImageCapturer(settings);
const handlers = [
new VideoHeartbeatHandler(tabRegistry),
new RecordMediaHandler(audioRecorder, imageCapturer),
new RerecordMediaHandler(audioRecorder),
new StartRecordingMediaHandler(audioRecorder, imageCapturer),
new StopRecordingMediaHandler(audioRecorder, imageCapturer),
new ToggleSubtitlesHandler(settings, tabRegistry),
new SyncHandler(tabRegistry),
new HttpPostHandler(),
new VideoToAsbplayerCommandForwardingHandler(),
new AsbplayerToVideoCommandForwardingHandler(),
new AsbplayerHeartbeatHandler(tabRegistry),
new AsbplayerV2ToVideoCommandForwardingHandler(),
new RefreshSettingsHandler(tabRegistry)
];
chrome.runtime.onMessage.addListener(
(request, sender, sendResponse) => {
for (const handler of handlers) {
if (handler.sender === request.sender) {
if (handler.command === null
|| handler.command === request.message.command) {
if (handler.handle(request, sender, sendResponse)) {
return true;
}
break;
}
}
}
}
);
chrome.commands.onCommand.addListener((command) => {
chrome.tabs.query({active: true}, (tabs) => {
if (!tabs || tabs.length === 0) {
return;
}
switch (command) {
case 'copy-subtitle':
case 'copy-subtitle-with-dialog':
for (const tab of tabs) {
for (const id in tabRegistry.videoElements) {
if (tabRegistry.videoElements[id].tab.id === tab.id) {
chrome.tabs.sendMessage(tabRegistry.videoElements[id].tab.id, {
sender: 'asbplayer-extension-to-video',
message: {
command: 'copy-subtitle',
showAnkiUi: command === 'copy-subtitle-with-dialog'
},
src: tabRegistry.videoElements[id].src
});
}
}
}
break;
case 'toggle-video-select':
for (const tab of tabs) {
chrome.tabs.sendMessage(tab.id, {
sender: 'asbplayer-extension-to-video',
message: {
command: 'toggle-video-select',
}
});
}
break;
default:
throw new Error('Unknown command ' + command);
}
});
});
<file_sep>/client/src/components/HelpDialog.js
import { makeStyles } from '@material-ui/styles';
import Button from '@material-ui/core/Button';
import Dialog from '@material-ui/core/Dialog';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import Link from '@material-ui/core/Link';
import SpeedIcon from '@material-ui/icons/Speed';
import StarIcon from '@material-ui/icons/Star';
import Table from '@material-ui/core/Table';
import TableBody from '@material-ui/core/TableBody';
import TableContainer from '@material-ui/core/TableContainer';
import TableCell from '@material-ui/core/TableCell';
import TableRow from '@material-ui/core/TableRow';
import Typography from '@material-ui/core/Typography';
const useStyles = makeStyles((theme) => ({
inlineIcon: {
maxWidth: '80%',
height: 20,
display: "inline-flex",
flexDirection: "column",
flexWrap: "wrap",
alignItems: "start",
textAlign: "left"
}
}));
export default function HelpDialog(props) {
const classes = useStyles();
const {open, extensionUrl, onClose} = props;
return (
<Dialog
open={open}
onBackdropClick={onClose}
onEscapeKeyDown={onClose}
>
<DialogContent>
<Typography variant="h6">
Loading files
</Typography>
<Typography component="ul">
<li>
Drag and drop mkv, srt, ass, or mp3 files into the player.
</li>
<li>
Multiple files can be dragged and dropped
simultaneously e.g. mkv+srt, mp3+ass etc.
</li>
<li>
Multiple subtitle files can loaded simultaneously. When multiple subtitle files are loaded,
they can be toggled on and off in-video using S+1, S+2, etc.
</li>
</Typography>
<br />
<Typography variant="h6">
Syncing with streaming video in another tab
</Typography>
<Typography component="ul">
<li>
Install the Chrome <Link color="secondary" target="_blank" rel="noreferrer" href={extensionUrl}>extension</Link>.
</li>
<li>Drag-and-drop a subtitle file into the video element you want to sync.</li>
<li>
Or, load a subtitle file into asbplayer and use the camera button in the bottom right.
</li>
<li>
It is recommended to use the extension keyboard shortcut (Ctrl+Shift+X by default) to mine subtitles since that will include audio/screenshots.
</li>
</Typography>
<br />
<Typography variant="h6">
Anki
</Typography>
<Typography component="ul">
<li>Synchronous workflow:</li>
<ul>
<li>
For synced streaming video, open the Anki dialog during playback by using Ctrl+Shift+X.
</li>
<li>
For local file playback, open the Anki dialog during playback by using Ctrl+Shift+Q.
</li>
</ul>
<li>Asynchronous workflow:</li>
<ul>
<li>
For synced streaming video, copy the current subtitle by using Ctrl+Shift+Z.
</li>
<li>
For local file playback, copy the current subtitle by using Ctrl+Shift+A.
</li>
<li>
<div className={classes.inlineIcon}>
Use the <StarIcon fontSize="small" /> button in the copy history.
</div>
</li>
</ul>
<li>
For synced streaming video, an audio/image will only be available if an extension keyboard shortcut was used (Ctrl+Shift+X or Ctrl+Shift+Z by default).
</li>
<li>
Configure Anki settings with the settings button in the top right. See this <Link color="secondary" target="_blank" rel="noreferrer" href="https://youtu.be/Mv7fEVb6PHo?t=44">video</Link> for how to configure AnkiConnect so that asbplayer can connect to Anki.
</li>
</Typography>
<br />
<Typography variant="h6">
Changing subtitle offset
</Typography>
<Typography component="ul">
<li>
Use Ctrl+Left/Right to cause the previous/next subtitle to appear at the current timestamp.
</li>
<li>
Use Ctrl+Shift+Left/Right to adjust timing further by 100 ms increments.
</li>
<li>
Or, click on the subtitle offset input field in the controls, type a number, and hit Enter.
</li>
</Typography>
<br />
<Typography variant="h6">
Condensed playback of local media files
</Typography>
<Typography component="ul">
<li>
Load an audio/video file with a subtitle file.
</li>
<li>
<div className={classes.inlineIcon}>
Use the <SpeedIcon fontSize="small" /> button in the bottom right.
</div>
</li>
</Typography>
<br />
<Typography variant="h6">
Keyboard shortcuts
</Typography>
<TableContainer>
<Table size="small">
<TableBody>
<TableRow>
<TableCell><Typography>Ctrl+Shift+A</Typography></TableCell>
<TableCell><Typography>Copy current subtitle</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Ctrl+Shift+Q</Typography></TableCell>
<TableCell><Typography>Copy current subtitle and open Anki export dialog</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Ctrl+Shift+Z</Typography></TableCell>
<TableCell><Typography>Copy current subtitle (streaming video in another tab)</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Ctrl+Shift+X</Typography></TableCell>
<TableCell><Typography>Copy current subtitle and open Anki export dialog (streaming video in another tab)</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Space</Typography></TableCell>
<TableCell><Typography>Play/pause</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>S</Typography></TableCell>
<TableCell><Typography>Toggle subtitles</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>S+1, S+2...</Typography></TableCell>
<TableCell><Typography>Toggle subtitle track 1, 2... in video</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>D+1, D+2...</Typography></TableCell>
<TableCell><Typography>Toggle subtitle track 1, 2... in asbplayer</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Left/Right</Typography></TableCell>
<TableCell><Typography>Seek to previous/next subtitle</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Ctrl+Left/Right or Shift+Left/Right</Typography></TableCell>
<TableCell><Typography>Adjust offset to previous/next subtitle</Typography></TableCell>
</TableRow>
<TableRow>
<TableCell><Typography>Ctrl+Shift+Left/Right</Typography></TableCell>
<TableCell><Typography>Adjust offset by +/- 100 ms</Typography></TableCell>
</TableRow>
</TableBody>
</Table>
</TableContainer>
<br />
<Typography variant="h6">
Common issues
</Typography>
<Typography component="ul">
<li>
asbplayer isn't detecting streaming video.
</li>
<ul>
<li>
Try refreshing both the asbplayer tab and the video in the other tab.
</li>
<li>
Make sure that in the extension details, the extension has access to all sites.
A lot of video players are loaded inside of iframes, so it's difficult to
list every single URL that the extension might need access to.
</li>
</ul>
<li>
The extension keyboard shortcuts aren't working.
</li>
<ul>
<li>
Try uninstalling and reinstalling the extension and restarting Chrome.
</li>
<li>
Make sure the extension isn't installed twice.
</li>
<li>
Verify the keyboard shortcut is bound as in this <Link color="secondary" target="_blank" rel="noreferrer" href="https://youtu.be/wYWbgovfNlI">video</Link>.
</li>
</ul>
<li>
asbplayer can't connect to Anki. It shows an error message e.g. 'Failed to fetch.'
</li>
<ul>
<li>
If you're using Brave, make sure asbplayer isn't being blocked by Shield.
</li>
<li>
Make sure that asbplayer is allowed by AnkiConnect, as in this <Link color="secondary" target="_blank" rel="noreferrer" href="https://youtu.be/Mv7fEVb6PHo?t=44">video</Link>.
</li>
<li>
Check that your browser or an ad blocker isn't blocking the request. A good place to start is by opening your browser's developer console and looking for errors.
</li>
</ul>
<li>
When mining streaming video, asbplayer isn't including audio/screenshot in flashcards.
</li>
<ul>
<li>
Make sure you use an extension keyboard shortcut (Ctrl+Shift+X by default).
</li>
</ul>
</Typography>
<Typography variant="h6">
Demos
</Typography>
<Typography component="ul">
<li>
<Link color="secondary" target="_blank" rel="noreferrer" href="https://www.youtube.com/watch?v=W9Lf3C7sRzc">Sentence mining streaming video (synchronous workflow)</Link>
</li>
<li>
<Link color="secondary" target="_blank" rel="noreferrer" href="https://www.youtube.com/watch?v=kJXVVixD8H8">Sentence mining streaming video (asynchronous workflow)</Link>
</li>
<li>
<Link color="secondary" target="_blank" rel="noreferrer" href="https://www.youtube.com/watch?v=sgrJF99WX-Q">Sentence mining streaming video (no subtitle file)</Link>
</li>
<li>
<Link color="secondary" target="_blank" rel="noreferrer" href="https://www.youtube.com/watch?v=J3E82spYqIk">Sentence mining video files (synchronous workflow)</Link>
</li>
<li>
<Link color="secondary" target="_blank" rel="noreferrer" href="https://www.youtube.com/watch?v=HsrrpnfM4pI">Sentence mining video files (asynchronous workflow)</Link>
</li>
</Typography>
<br />
<Typography variant="h6">
Browser Compatibility
</Typography>
<Typography component="ul">
<li>
The asbplayer application and extension have only been tested on Chrome 91 and later and likely work on other Chromium-based browsers.
</li>
<li>
Local file playback is supported only for codecs supported by the browser.
The <Link color="secondary" target="_blank" rel="noreferrer" href="https://github.com/animebook/animebook.github.io#video-format-support">animebook</Link> readme has a detailed explanation of this and links
to browsers that have good compatibility. Personally, I use Microsoft Edge and paid one dollar for HEVC support.
</li>
<li>
Audio track selection for mkv files is available if experimental web platform features are enabled from chrome://flags.
</li>
</Typography>
</DialogContent>
<DialogActions>
<Button onClick={() => onClose()}>OK</Button>
</DialogActions>
</Dialog>
);
}
<file_sep>/extension/src/ui/index.js
import '@fontsource/roboto';
import React from 'react';
import { render } from 'react-dom';
import AnkiUi from './components/AnkiUi';
import VideoNameUi from './components/VideoNameUi';
import Bridge from './Bridge';
export function renderAnkiUi(element, mp3WorkerUrl) {
const bridge = new Bridge();
render(<AnkiUi bridge={bridge} mp3WorkerUrl={mp3WorkerUrl} />, element);
return bridge;
}
export function renderVideoNameUi(element) {
const bridge = new Bridge();
render(<VideoNameUi bridge={bridge} />, element);
return bridge;
}
|
85a03f08a2508c5ef645efbca240f05a655c660e
|
[
"JavaScript"
] | 4 |
JavaScript
|
stevenyuan49/anki-player.github.io-
|
8b41aa0910cd78825d34ea81cfb9b0b95a21a82b
|
3122e6ebea49a3d1cd07071429ad05b1476ed34a
|
refs/heads/master
|
<repo_name>nvgeele/slipy<file_sep>/slipy/json.py
from rpython.rlib.parsing.pypackrat import PackratParser
from rpython.rlib.parsing.makepackrat import BacktrackException, Status
def unquote(s):
str_lst = []
pos = 1
last = len(s)-1
while pos < last:
ch = s[pos]
if ch == '\\':
pos += 1
ch = s[pos]
if ch == '\\' or ch == '\"':
str_lst.append(ch)
else:
raise Exception("String unquote error")
else:
str_lst.append(ch)
pos += 1
return ''.join(str_lst)
class J_Base(object):
is_string = is_bool = is_list = is_object = is_num = False
def string_value(self):
raise TypeError
def bool_value(self):
raise TypeError
def object_value(self):
raise TypeError
def list_value(self):
raise TypeError
def num_value(self):
raise TypeError
class J_Simple(J_Base):
def __init__(self, val):
self._val = val
class J_String(J_Simple):
is_string = True
def string_value(self):
return self._val
class J_Bool(J_Simple):
is_bool = True
def bool_value(self):
return self._val
class Entry(object):
def __init__(self, key, val):
assert isinstance(key, J_String)
self._key = key.string_value()
self._val = val
def key(self):
return self._key
def val(self):
return self._val
class J_Object(J_Base):
is_object = True
def __init__(self, entries):
self._dict = {}
for entry in entries:
self._dict[entry.key()] = entry.val()
def object_value(self):
return self._dict
class J_List(J_Base):
is_list = True
def __init__(self, values):
self._list = []
for v in values:
assert isinstance(v, J_Base)
self._list.append(v)
def list_value(self):
return self._list
class J_Num(J_Base):
is_num = True
def __init__(self, str):
try:
self._val = int(str)
except:
try:
self._val = float(str)
except:
raise Exception("Number type not supported")
def num_value(self):
return self._val
j_true = J_Bool(True)
j_false = J_Bool(False)
class JSONParser(PackratParser):
"""
IGNORE:
` |\n|\t`;
STRING:
IGNORE*
c = `\\"[^\\\\"]*\\"`
IGNORE*
return {J_String(unquote(c))};
NUMBER:
IGNORE*
c = `\-?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][\+\-]?[0-9]+)?`
IGNORE*
return {J_Num(c)};
TRUE:
'true'
return {j_true};
FALSE:
'false'
return {j_false};
array_values:
r = array_values
IGNORE*
','
IGNORE*
v = value
return {r+[v]}
| v = value
IGNORE*
return {[v]}
| return {[]};
array:
IGNORE*
'['
IGNORE*
c = array_values
']'
return {J_List(c)};
entry:
s = STRING
IGNORE*
':'
IGNORE*
v = value
return {Entry(s, v)};
entries:
e = entry
','
IGNORE*
r = entries
return {[e] + r}
| e = entry
return {[e]};
obj:
IGNORE*
'{'
IGNORE*
e = entries
IGNORE*
'}'
return {J_Object(e)};
value:
STRING
| NUMBER
| TRUE
| FALSE
| obj
| array;
"""
def loads(str):
p = JSONParser(str)
try:
v = p.value()
return v
except:
raise Exception("Could not parse JSON")<file_sep>/README.md
SliPy
=====
It's Slip, in PyPy!
<file_sep>/slipy/AST.py
from rpython.rlib import jit
from slipy.continuation import *
from slipy.environment import Env, get_global_env
from slipy.exceptions import SlipException
from slipy.util import zip
from slipy.values import *
class AST(object):
simple = False
def eval(self, env, cont):
if self.simple:
from slipy.interpreter import return_value_direct
return return_value_direct(self.eval_simple(env), env, cont)
raise Exception("abstract base class")
def eval_simple(self, env):
raise Exception("abstract base class")
def __str__(self):
return "<AST>"
def to_string(self):
return self.__str__()
class Application(AST):
_immutable_fields_ = ["_operator", "_operands[*]"]
def __init__(self, operator, operands):
assert operator.simple
for o in operands:
assert o.simple
self._operator = operator
self._operands = operands
@jit.unroll_safe
def eval(self, env, cont):
# Operator & operands are aexps, thus simple
operator = self._operator.eval_simple(env)
operands = [None] * len(self._operands)
for i, op in enumerate(self._operands):
operands[i] = op.eval_simple(env)
if not isinstance(operator, W_Callable):
raise SlipException("Operator not a callable instance")
else:
return operator.call(operands, env, cont)
def __str__(self):
rator_str = self._operator.to_string()
rands_str = [None] * len(self._operands)
for i, op in enumerate(self._operands):
rands_str[i] = op.to_string()
rands_str = " ".join(rands_str)
return "(%s %s)" % (rator_str, rands_str)
class If(AST):
_immutable_fields_ = ["test", "consequent", "alternative"]
def __init__(self, test, consequent, alternative):
self.test = test
self.consequent = consequent
self.alternative = alternative
def eval(self, env, cont):
# Test is an aexp, i.e. simple
test = self.test.eval_simple(env)
if is_true(test):
return self.consequent, env, cont
else:
return self.alternative, env, cont
def __str__(self):
return "(if %s %s %s)" % \
(self.test.to_string(), self.consequent.to_string(),
self.alternative.to_string())
class Lambda(AST):
_immutable_fields_ = ["args[*]", "body[*]", "vars[*]"]
simple = True
def __init__(self, args, vars, body):
self.args = args
self.body = body
self.vars = vars
def eval_simple(self, env):
return W_Closure(self.args, self.vars, env, self.body)
def __str__(self):
args = [None] * len(self.args)
for i, arg in enumerate(self.args):
args[i] = arg.to_string()
vars = [None] * len(self.vars)
for i, var in enumerate(self.vars):
vars[i] = var.to_string()
body = [None] * len(self.body)
for i, exp in enumerate(self.body):
body[i] = exp.to_string()
return "(lambda (%s) (%s) %s)" % (" ".join(args),
" ".join(vars),
" ".join(body))
class Let(AST):
_immutable_fields_ = ["vars[*]", "vals[*]", "decls[*]", "body"]
def __init__(self, vars, vals, decls, body):
self.vars = vars
self.vals = vals
self.decls = decls
self.body = Sequence(body)
def eval(self, env, cont):
new_env = Env(len(self.vars) + len(self.decls), previous=env)
return self.make_cont(new_env, cont)
def make_cont(self, env, prev, i=0):
# jit.promote(self)
# jit.promote(i)
if i == len(self.vars):
return self.body, env, prev
else:
return self.vals[i], env, LetContinuation(self, i, env, prev)
def __str__(self):
vars = [None] * len(self.vars)
for i, t in enumerate(zip(self.vars, self.vals)):
var, val = t
vars[i] = "[%s %s]" % (var.to_string(), val.to_string())
decls = [None] * len(self.decls)
for i, var in enumerate(self.decls):
decls[i] = var.to_string()
body = [None] * len(self.body.body)
for i, exp in enumerate(self.body.body):
body[i] = exp.to_string()
return "(let (%s) (%s) %s)" % ("".join(vars),
" ".join(decls),
" ".join(body))
class SetBang(AST):
# We do not need a continuation as the val is an aexp
simple = True
_immutable_fields_ = ["sym", "scope", "offset", "val"]
def __init__(self, sym, scope, offset, val):
self.sym = sym
self.val = val
self.scope = int(scope)
self.offset = int(offset)
def eval_simple(self, env):
val = self.val.eval_simple(env)
env.set_var(self.scope, self.offset, val)
return val
def __str__(self):
return "(set! %s %s)" % (self.sym.to_string(), self.val.to_string())
class Sequence(AST):
_immutable_fields_ = ["body[*]"]
def __init__(self, exprs):
assert len(exprs) > 0, "Sequence body needs at least one expression"
self.body = exprs
def eval(self, env, cont):
return self.make_cont(env, cont)
def make_cont(self, env, prev, i=0):
jit.promote(self)
jit.promote(i)
if i == len(self.body) - 1:
return self.body[i], env, prev
else:
return self.body[i], env, SequenceContinuation(self, i+1, env, prev)
def __str__(self):
exprs = [None] * len(self.body)
for i, exp in enumerate(self.body):
exprs[i] = exp.to_string()
exprs = " ".join(exprs)
return "(begin %s)" % exprs
class VarRef(AST):
_immutable_fields_ = ["sym", "scope", "offset"]
simple = True
def __init__(self, sym, scope, offset):
self.sym = sym
self.scope = int(scope)
self.offset = int(offset)
def eval_simple(self, env):
cell = env.get_var(self.scope, self.offset)
val = cell.get_value()
if val is w_undefined:
raise SlipException("Variable referenced before definition")
return val
def __str__(self):
return self.sym.to_string()
class Program(AST):
_immutable_fields_ = ["vars[*]", "body"]
def __init__(self, vars, body):
self.vars = vars
self.body = Sequence(body)
def eval(self, env, cont):
env = Env(len(self.vars), previous=get_global_env())
return self.body, env, cont
def __str__(self):
vars = [None] * len(self.vars)
for i, var in enumerate(self.vars):
vars[i] = var.to_string()
body = [None] * len(self.body.body)
for i, exp in enumerate(self.body.body):
body[i] = exp.to_string()
return "((%s) %s)" % (" ".join(vars), " ".join(body))
# TODO: if val is a list, return copy
class Quote(AST):
_immutable_fields_ = ["_val"]
simple = True
def __init__(self, val):
self._val = val
def eval_simple(self, env):
return self._val
def __str__(self):
if isinstance(self._val, W_Symbol) or isinstance(self._val, W_Pair):
return "'%s" % self._val.to_string()
else:
return self._val.to_string()<file_sep>/runbenchmarks.sh
#!/bin/bash
for f in ./benchmarks/*.scm
do
echo $f
../rpislip/slip -l $f | tee $f.slip
./targetslipy-c $f | tee $f.slipy
done
<file_sep>/targetslipy.py
from slipy.exceptions import SlipException
from slipy.parse import parse_ast
from slipy.interpreter import interpret_with_global, initialize_global_env
from slipy.read import expand_string, expand_file, init_reader
from rpython.rlib.objectmodel import we_are_translated
# TODO: Cache ASTs of expanded files to disk?
def main(argv):
# TODO: Top-level env join with varlet
if not len(argv) == 2:
print "Please provide a file as argument!"
return 1
try:
init_reader()
initialize_global_env()
data = expand_file(argv[1])
ast = parse_ast(data)
print "<< SliPy >>"
print interpret_with_global(ast).to_string()
except SlipException, e:
print "Slip error: %s" % e.message
raise
except Exception, e:
print "Caught an exception!"
raise
return 0
def target(*args):
return main, None
if __name__ == '__main__':
import sys
main(sys.argv)<file_sep>/slipy/natives.py
import time
from math import sin, sqrt
from rpython.rlib import jit
from slipy.exceptions import *
from slipy.continuation import MapStartContinuation
from slipy.read import read_string, expand_string
from slipy.values import *
from slipy.util import raw_input, write
native_dict = {}
_current_offset = 0
# TODO: dedicated functions for error throwing
# TODO: test with append if lists are copied properly
# TODO: automatic type checkers in declare_native
def declare_native(name, simple=True):
def wrapper(func):
def inner(args, env, cont):
if simple:
from slipy.interpreter import return_value_direct
result = func(args)
return return_value_direct(result, env, cont)
else:
# TODO: without the assert, the inferencer tells us ret may be None
# TODO: find out why! Probably due to eval and EvaluationFinished or sth
ret = func(args, env, cont)
assert ret
return ret
global _current_offset
native = W_NativeFunction(inner)
names = [name] if isinstance(name, str) else name
for n in names:
sym = W_Symbol.from_string(n)
native_dict[sym] = (_current_offset, native)
_current_offset += 1
inner.func_name = "%s_wrapped" % func.func_name
return inner
return wrapper
@declare_native("eq?")
def is_eq(args):
if not len(args) == 2:
raise SlipException(arg_count_error % "eq?")
# TODO: if we memoise W_Numbers, we would not need to do this
if isinstance(args[0], W_Number) and isinstance(args[1], W_Number):
return W_Boolean.from_value(args[0].is_eq(args[1]))
else:
return W_Boolean.from_value(args[0] is args[1])
@declare_native("pair?")
def is_pair(args):
if not len(args) == 1:
raise SlipException(arg_count_error % "pair?")
return W_Boolean.from_value(isinstance(args[0], W_Pair))
@declare_native("not")
def is_not(args):
if not len(args) == 1:
raise SlipException(arg_count_error % "not")
return W_Boolean.from_value(args[0] is w_false)
@declare_native("+")
@jit.unroll_safe
def plus(args):
if len(args) == 0:
return W_Integer(0)
elif len(args) == 1:
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "+")
return args[0]
else:
acc = args[0]
for i in range(1, jit.promote(len(args))):
if not isinstance(args[i], W_Number):
raise SlipException(arg_types_error % "+")
acc = acc.add(args[i])
return acc
@declare_native("-")
@jit.unroll_safe
def minus(args):
if len(args) == 0:
return W_Integer(0)
elif len(args) == 1:
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "-")
return W_Integer(0).sub(args[0])
else:
acc = args[0]
for i in range(1, jit.promote(len(args))):
if not isinstance(args[i], W_Number):
raise SlipException(arg_types_error % "-")
acc = acc.sub(args[i])
return acc
@declare_native("*")
@jit.unroll_safe
def multiply(args):
if len(args) == 0:
return W_Integer(1)
elif len(args) == 1:
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "*")
return args[0]
else:
acc = args[0]
for i in range(1, jit.promote(len(args))):
if not isinstance(args[i], W_Number):
raise SlipException(arg_types_error % "*")
acc = acc.mul(args[i])
return acc
@declare_native("/")
@jit.unroll_safe
def divide(args):
if len(args) == 0:
return W_Integer(1)
elif len(args) == 1:
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "/")
return args[0]
else:
acc = args[0]
for i in range(1, jit.promote(len(args))):
if not isinstance(args[i], W_Number):
raise SlipException(arg_types_error % "/")
acc = acc.div(args[i])
return acc
@declare_native("sqrt")
def num_sqrt(args):
if not len(args) == 1:
raise SlipException(arg_count_error % "sin")
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "sin")
return W_Float(sqrt(args[0].value()))
@declare_native("sin")
def num_sin(args):
if not len(args) == 1:
raise SlipException(arg_count_error % "sin")
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "sin")
return W_Float(sin(args[0].value()))
@declare_native("quotient")
def num_quotient(args):
if not len(args) == 2:
raise SlipException(arg_count_error % "quotient")
if not isinstance(args[0], W_Number):
raise SlipException(arg_types_error % "quotient")
if not isinstance(args[1], W_Number):
raise SlipException(arg_types_error % "quotient")
return W_Integer(args[0].value()/args[1].value())
# Variadic numeric comparison chain: (op a b c ...) is true iff the
# relation holds pairwise left-to-right.  All five natives share the same
# shape; the loop is @jit.unroll_safe and len(args) is promoted so the
# JIT can unroll it for a fixed argument count.  Each bails out with #f
# as soon as one pair fails.
@declare_native("=")
@jit.unroll_safe
def num_equal(args):
    if not len(args) >= 2:
        raise SlipException(arg_count_error % "=")
    i = 2
    v = True
    while i <= jit.promote(len(args)):
        l, r = args[i-2], args[i-1]
        if not isinstance(l, W_Number):
            raise SlipException(arg_types_error % "=")
        if not isinstance(r, W_Number):
            raise SlipException(arg_types_error % "=")
        v = v and l.is_eq(r)
        if not v:
            return w_false
        i += 1
    return W_Boolean.from_value(v)
@declare_native("<")
@jit.unroll_safe
def num_lt(args):
    # (< a b ...) -- strictly increasing chain.
    if not len(args) >= 2:
        raise SlipException(arg_count_error % "<")
    i = 2
    v = True
    while i <= jit.promote(len(args)):
        l, r = args[i-2], args[i-1]
        if not isinstance(l, W_Number):
            raise SlipException(arg_types_error % "<")
        if not isinstance(r, W_Number):
            raise SlipException(arg_types_error % "<")
        v = v and l.lt(r)
        if not v:
            return w_false
        i += 1
    return W_Boolean.from_value(v)
@declare_native(">")
@jit.unroll_safe
def num_gt(args):
    # (> a b ...) -- strictly decreasing chain.
    if not len(args) >= 2:
        raise SlipException(arg_count_error % ">")
    i = 2
    v = True
    while i <= jit.promote(len(args)):
        l, r = args[i-2], args[i-1]
        if not isinstance(l, W_Number):
            raise SlipException(arg_types_error % ">")
        if not isinstance(r, W_Number):
            raise SlipException(arg_types_error % ">")
        v = v and l.gt(r)
        if not v:
            return w_false
        i += 1
    return W_Boolean.from_value(v)
@declare_native("<=")
@jit.unroll_safe
def num_le(args):
    # (<= a b ...) -- non-decreasing chain.
    if not len(args) >= 2:
        raise SlipException(arg_count_error % "<=")
    i = 2
    v = True
    while i <= jit.promote(len(args)):
        l, r = args[i-2], args[i-1]
        if not isinstance(l, W_Number):
            raise SlipException(arg_types_error % "<=")
        if not isinstance(r, W_Number):
            raise SlipException(arg_types_error % "<=")
        v = v and l.le(r)
        if not v:
            return w_false
        i += 1
    return W_Boolean.from_value(v)
@declare_native(">=")
@jit.unroll_safe
def num_ge(args):
    # (>= a b ...) -- non-increasing chain.
    if not len(args) >= 2:
        raise SlipException(arg_count_error % ">=")
    i = 2
    v = True
    while i <= jit.promote(len(args)):
        l, r = args[i-2], args[i-1]
        if not isinstance(l, W_Number):
            raise SlipException(arg_types_error % ">=")
        if not isinstance(r, W_Number):
            raise SlipException(arg_types_error % ">=")
        v = v and l.ge(r)
        if not v:
            return w_false
        i += 1
    return W_Boolean.from_value(v)
@declare_native("exact->inexact")
def exact_inexact(args):
    # (exact->inexact n) -- convert an integer to a float.  Only
    # W_Integer is accepted (floats are already inexact here).
    if not len(args) == 1:
        raise SlipException(arg_count_error % "exact->inexact")
    if not isinstance(args[0], W_Integer):
        raise SlipException(arg_types_error % "exact->inexact")
    return W_Float(args[0].value())
@declare_native(["error", "fatal-error"])
def throw_error(args):
    # (error v) / (fatal-error v) -- abort evaluation with the given value.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "error")
    # BUGFIX: the original applied % to a string literal that contained no
    # conversion specifier ("... value: " % x), which raises TypeError
    # ("not all arguments converted") instead of the intended SlipException.
    raise SlipException("Program threw error with value: %s" % args[0].to_string())
@declare_native("map", simple=False)
def list_map(args, env, cont):
    # (map f lst) -- non-simple native: it needs env/cont because it has
    # to call back into the interpreter to apply f to each element.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "map")
    fn = args[0]
    if not isinstance(fn, W_Callable):
        raise SlipException(arg_types_error % "map")
    list = args[1]
    if not isinstance(list, W_Pair):
        raise SlipException(arg_types_error % "map")
    return do_map(fn, list, env, cont)
def do_map(fn, list, env, cont):
    # One step of map: apply fn to the car; MapStartContinuation (see
    # continuation.py) recursively maps the cdr and conses the results.
    from slipy.interpreter import return_value_direct
    if not isinstance(list, W_Pair):
        if list is not w_empty:
            raise SlipException("map: malformed list")
        return return_value_direct(w_empty, env, cont)
    return fn.call([list.car()], env, MapStartContinuation(fn, list.cdr(), cont))
@declare_native("apply", simple=False)
def apply(args, env, cont):
    # (apply f lst) -- call f with the elements of lst as its arguments.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "apply")
    fn = args[0]
    if not isinstance(fn, W_Callable):
        raise SlipException(arg_types_error % "apply")
    if not isinstance(args[1], W_Pair):
        raise SlipException(arg_types_error % "apply")
    try:
        actual_args = values_from_list(args[1])
    except SlipException:
        # values_from_list raises on improper lists; re-raise with a
        # message attributed to apply.
        raise SlipException("apply: expected list")
    return fn.call(actual_args, env, cont)
@declare_native(["call/cc", "call-with-current-continuation"], simple=False)
def callcc(args, env, cont):
    # (call/cc f) -- reify the current continuation as a first-class
    # W_Continuation and pass it to f.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "call/cc")
    fn = args[0]
    if not isinstance(fn, W_Callable):
        raise SlipException(arg_types_error % "call/cc")
    return fn.call([W_Continuation(cont)], env, cont)
@declare_native("time")
def slip_time(args):
    # (time) -- current wall-clock time in seconds as a float.
    if not len(args) == 0:
        raise SlipException(arg_count_error % "time")
    return W_Float(time.time())
@declare_native("display")
def display(args):
    # (display v) -- write v's display form to stdout, no newline.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "display")
    write(args[0].to_display())
    return w_void
@declare_native("displayln")
def displayln(args):
    # (displayln v) -- like display, with a trailing newline
    # (Python 2 print statement).
    if not len(args) == 1:
        raise SlipException(arg_count_error % "displayln")
    print args[0].to_display()
    return w_void
@declare_native("newline")
def newline(args):
    # (newline) -- emit a single newline.
    if not len(args) == 0:
        raise SlipException(arg_count_error % "newline")
    print ""
    return w_void
@declare_native("void")
def void(args):
    # (void ...) -- ignore all arguments, return the void singleton.
    return w_void
@declare_native("list")
def list(args):
    # (list ...) -- build a proper list from the argument vector.
    return list_from_values(args)
# TODO: Support more than 2 args
@declare_native("append")
def append(args):
    # (append l1 l2) -- concatenate two proper lists by flattening both
    # to Python lists and rebuilding.  Limited to exactly two arguments.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "append")
    try:
        v1 = values_from_list(args[0])
        v2 = values_from_list(args[1])
        return list_from_values(v1+v2)
    except SlipException:
        raise SlipException("append: expected proper lists as arguments")
@declare_native("cons")
def cons(args):
    # (cons a d) -- allocate a fresh mutable pair.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "cons")
    assert isinstance(args[0], W_SlipObject)
    assert isinstance(args[1], W_SlipObject)
    return W_Pair(args[0], args[1])
@declare_native("car")
def car(args):
    # (car p) -- first component of a pair.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "car")
    if not isinstance(args[0], W_Pair):
        raise SlipException(arg_types_error % "car")
    return args[0].car()
@declare_native("cdr")
def cdr(args):
    # (cdr p) -- second component of a pair.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "cdr")
    if not isinstance(args[0], W_Pair):
        raise SlipException(arg_types_error % "cdr")
    return args[0].cdr()
@declare_native("set-car!")
def set_car(args):
    # (set-car! p v) -- destructively replace the car; returns v.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "set-car!")
    if not isinstance(args[0], W_Pair):
        raise SlipException(arg_types_error % "set-car!")
    args[0].set_car(args[1])
    return args[1]
@declare_native("set-cdr!")
def set_cdr(args):
    # (set-cdr! p v) -- destructively replace the cdr; returns v.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "set-cdr!")
    if not isinstance(args[0], W_Pair):
        raise SlipException(arg_types_error % "set-cdr!")
    args[0].set_cdr(args[1])
    return args[1]
@declare_native("length")
@jit.unroll_safe
def list_length(args):
    # (length lst) -- number of elements in a proper list; raises a
    # SlipException on improper lists.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "length")
    # BUGFIX: (length '()) must return 0 per Scheme semantics; the empty
    # list is not a W_Pair, so the original raised a type error for it.
    if args[0] is w_empty:
        return W_Integer(0)
    if not isinstance(args[0], W_Pair):
        raise SlipException(arg_types_error % "length")
    length = 1
    cur = args[0]
    while True:
        cdr = cur.cdr()
        if isinstance(cdr, W_Pair):
            cur = cdr
            length += 1
        elif cdr is w_empty:
            return W_Integer(length)
        else:
            raise SlipException("Argument not a list!")
@declare_native("null?")
def is_null(args):
    # (null? v) -- #t iff v is the empty-list singleton.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "null?")
    return w_true if args[0] is w_empty else w_false
@declare_native("read")
def read(args):
    # (read) -- read one datum from stdin via the external Racket reader
    # and convert the resulting JSON into Slip values.
    from slipy.parse import parse_data
    # TODO: multiline input
    # TODO: no more raw input
    # TODO: read string
    if not len(args) == 0:
        raise SlipException(arg_count_error % "read")
    input = raw_input('')
    data = read_string(input)
    return parse_data(data)
@declare_native("eval", simple=False)
def eval(args, env, cont):
    # (eval form) -- expand the quoted form through the external reader,
    # parse it to an AST, run it in the current env, and hand the result
    # to the current continuation.
    from slipy.parse import parse_ast
    from slipy.interpreter import interpret_with_env, return_value_direct
    if not len(args) == 1:
        raise SlipException(arg_count_error % "eval")
    form = args[0]
    # TODO: fix %s stuff
    expanded = expand_string("(%s)" % form.to_string())
    ast = parse_ast(expanded)
    return_value = interpret_with_env(ast, env)
    return return_value_direct(return_value, env, cont)
@declare_native("vector")
def vector(args):
    # (vector ...) -- build a vector from the argument values.
    return W_Vector(args, len(args))
@declare_native("make-vector")
def make_vector(args):
    # (make-vector n fill) -- vector of n copies of fill.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "make-vector")
    size = args[0]
    if not isinstance(size, W_Integer):
        raise SlipException(arg_types_error % "make-vector")
    return W_Vector.make(size.value(), args[1])
@declare_native("vector-length")
def vector_length(args):
    # (vector-length v) -- number of slots.
    if not len(args) == 1:
        raise SlipException(arg_count_error % "vector-length")
    if not isinstance(args[0], W_Vector):
        raise SlipException(arg_types_error % "vector-length")
    return W_Integer(args[0].length())
@declare_native("vector-ref")
def vector_ref(args):
    # (vector-ref v i) -- element at index i; W_Vector.ref bounds-checks
    # the upper bound only.
    if not len(args) == 2:
        raise SlipException(arg_count_error % "vector-ref")
    idx = args[1]
    if not isinstance(args[0], W_Vector):
        raise SlipException(arg_types_error % "vector-ref")
    if not isinstance(idx, W_Integer):
        raise SlipException(arg_types_error % "vector-ref")
    return args[0].ref(idx.value())
@declare_native("vector-set!")
def vector_set(args):
if not len(args) == 3:
raise SlipException(arg_count_error % "vector-set!")
idx = args[1]
if not isinstance(args[0], W_Vector):
raise SlipException(arg_types_error % "vector-set!")
if not isinstance(idx, W_Integer):
raise SlipException(arg_types_error % "vector-set!")
args[0].set(idx.value(), args[2])
return args[2]<file_sep>/slipy/environment.py
from rpython.rlib import jit
from slipy.exceptions import SlipException
from slipy.values import w_undefined
class Cell(object):
    # Mutable box for one variable binding.  Indirection through a cell
    # lets Env keep its bindings list immutable while set! still works.
    def __init__(self, value):
        self._value = value
    def get_value(self):
        return self._value
    def set_value(self, value):
        self._value = value
class Env(object):
    # Lexical environment frame.  `structure` is the full chain of frames
    # from the global frame down to this one, so (scope, offset) pairs
    # computed at parse time index straight into it without walking
    # parent links.  All fields are immutable after construction (cells
    # provide the mutability).
    _immutable_fields_ = ["previous", "bindings[*]", "structure[*]", "scope"]
    @jit.unroll_safe
    def __init__(self, size, previous=None):
        self.previous = previous
        self.bindings = [None] * size
        for i in range(0, size):
            self.bindings[i] = Cell(w_undefined)
        if previous:
            # Copies the parent's frame list; O(depth) per new frame.
            self.structure = previous.structure + [self]
        else:
            self.structure = [self]
        self.scope = len(self.structure) - 1
    # @jit.elidable
    def get_var(self, scope, offset):
        # Returns the Cell (not its value); callers unwrap it.
        env = self.structure[scope]
        cell = env.bindings[offset]
        return cell
    def set_var(self, scope, offset, val):
        cell = self.structure[scope].bindings[offset]
        cell.set_value(val)
        return val
    def __str__(self):
        return "#<env>"
class _GlobalEnv(object):
    # Holder object: RPython cannot rebind module-level globals, so the
    # mutable reference lives on an instance instead.
    def __init__(self):
        self.global_env = None
_global_env = _GlobalEnv()
def get_global_env():
    # Accessor for the single global environment frame.
    return _global_env.global_env
def set_global_env(env):
    # Installed once by initialize_global_env().
    _global_env.global_env = env
import os
from slipy.json import loads
from rpython.rlib.objectmodel import we_are_translated
from rpython.rlib import streamio
from rpython.rlib.rfile import create_popen_file
def _open_reader_python():
    "NOT_RPYTHON"
    # Spawn the external Racket reader (read.rkt, next to this module) as
    # a long-lived subprocess; we talk to it over stdin/stdout pipes.
    # Untranslated-mode only (uses the subprocess module).
    from subprocess import Popen, PIPE
    file_path = os.path.dirname(os.path.realpath(__file__))
    reader_path = os.path.join(file_path, "read.rkt")
    cmd = "racket -l racket/base -t %s -e \"(read-loop)\"" % (reader_path)
    process = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    return process
# TODO: Fix reader not detecting errors anymore!
class ReaderPython(object):
    # Reader backend for untranslated runs: keeps one persistent Racket
    # subprocess and exchanges newline/NUL-delimited JSON messages with it.
    # Every method is NOT_RPYTHON.
    def __init__(self):
        "NOT_RPYTHON"
        self.proc = _open_reader_python()
    def _restart(self):
        "NOT_RPYTHON"
        self.proc = _open_reader_python()
    def _assure_running(self):
        "NOT_RPYTHON"
        # Restart-and-recheck; recursion terminates once the fresh
        # process reports as running.
        if not(self.is_running()):
            self._restart()
            self._assure_running()
    def _try_read(self, str):
        "NOT_RPYTHON"
        # Send one request, terminated by "\n\0\n", and read one JSON line back.
        self._assure_running()
        self.proc.stdin.write(str.encode("utf-8"))
        self.proc.stdin.write("\n\0\n")
        self.proc.stdin.flush()
        if not(self.is_running()):
            raise Exception("Reader died whilst reading")
        return self.proc.stdout.readline()
    def is_running(self):
        "NOT_RPYTHON"
        # NOTE(review): returncode is None while running and 0 on clean
        # exit; `not returncode` is True in both cases, so a process that
        # exited with status 0 still counts as "running" -- confirm.
        self.proc.poll()
        return not(self.proc.returncode)
    # TODO: Fix code duplication
    # TODO: Maybe those %s need to be between double quotes
    def read(self, str):
        "NOT_RPYTHON"
        # Wrap the payload in (read ...), parse the JSON reply, and
        # unwrap the {success, content} envelope.
        raw = self._try_read("(read %s)" % str)
        data = loads(raw)
        assert data.is_object
        data = data.object_value()
        if data['success']:
            return data['content']
        else:
            raise Exception("Read error: %s" % data['content'])
    def expand(self, str):
        "NOT_RPYTHON"
        # Same protocol as read(), but asks the reader to macro-expand.
        raw = self._try_read("(expand %s)" % str)
        data = loads(raw)
        assert data.is_object
        data = data.object_value()
        if data['success']:
            return data['content']
        else:
            raise Exception("Read error: %s" % data['content'])
    def terminate(self):
        "NOT_RPYTHON"
        self.proc.terminate()
class ReaderRPython(object):
    # Reader backend for translated binaries: spawns a fresh Racket
    # process per request (rlib's create_popen_file only gives us a
    # one-way pipe), which writes its JSON answer to a temp file.
    def _call_reader_rpython(self, modus, input):
        # NOTE(review): os.tmpnam is insecure/racy; acceptable here only
        # because the reader is a trusted local helper -- confirm.
        tmp_file = os.tmpnam()
        cmd = "racket -l racket/base -t %s -e '(do-read \"%s\")'" % ("read.rkt", tmp_file)
        if not os.access("read.rkt", os.R_OK):
            raise Exception("Racket reader can not be accessed")
        pipe = create_popen_file(cmd, "w")
        input = "(%s %s)" % (modus, input)
        # NOTE: might go wrong when dealing with UNICODE
        pipe.write(input)
        pipe.write("\n\0\n")
        pipe.flush()
        # close() waits for the child; non-zero exit status is fatal.
        err = os.WEXITSTATUS(pipe.close())
        if err != 0:
            raise Exception("Reader produced an unexpected error")
        return tmp_file
    def _parse_json_file(self, path):
        # Read, delete, and decode the temp file the reader produced.
        f = streamio.open_file_as_stream(path)
        s = f.readall()
        f.close()
        os.remove(path)
        json_data = loads(s)
        assert json_data.is_object
        json_data = json_data.object_value()
        if json_data['success']:
            return json_data['content']
        else:
            raise Exception("Read error")
    def expand(self, str):
        json_file = self._call_reader_rpython("expand", str)
        json_data = self._parse_json_file(json_file)
        return json_data
    def read(self, str):
        json_file = self._call_reader_rpython("read", str)
        json_data = self._parse_json_file(json_file)
        return json_data
# We can't modify global vars, so store mutable global vars in a class
class Reader(object):
    # Holder for the active reader backend (ReaderRPython when
    # translated, ReaderPython otherwise); set by init_reader().
    def __init__(self):
        self.reader = None
_reader = Reader()
def read_string(str):
    # Read one datum through the active reader backend.
    # BUGFIX: the work was indented under we_are_translated(), so in
    # untranslated (plain CPython) runs these helpers silently returned
    # None even though init_reader() had installed a working ReaderPython.
    # The translated-only part is just the isinstance assert, which helps
    # the RPython annotator.
    if we_are_translated():
        assert isinstance(_reader.reader, ReaderRPython)
    return _reader.reader.read(str)
def expand_string(str):
    # Macro-expand a source string through the active reader backend.
    if we_are_translated():
        assert isinstance(_reader.reader, ReaderRPython)
    return _reader.reader.expand(str)
def expand_file(path):
    # Read a whole file and macro-expand its contents (wrapped in parens
    # so the file body forms a single list of top-level forms).
    if we_are_translated():
        assert isinstance(_reader.reader, ReaderRPython)
    if not os.access(path, os.R_OK):
        raise Exception("Can not read file")
    f = streamio.open_file_as_stream(path)
    s = f.readall()
    f.close()
    return _reader.reader.expand("(%s)" % s)
def init_reader():
    # Install the backend matching the execution mode.
    if we_are_translated():
        _reader.reader = ReaderRPython()
    else:
        _reader.reader = ReaderPython()
#/usr/bin/make -f
#
# Build driver: translates the slipy interpreter with RPython.
# Path of pypy checkout
PYPYPATH ?= /Users/nvgeele/Dev/pypy
# Invocation of pytest, defaults to pypy's stuff
# but may also be `py.test`
PYTEST ?= $(PYPYPATH)/pytest.py
RPYTHON ?= $(PYPYPATH)/rpython/bin/rpython --batch
TRANSLATE_TARGETS := translate-jit
# Every .py file is a prerequisite: any source change retriggers translation.
PYFILES := $(shell find . -name '*.py' -type f)
translate-jit-all: $(TRANSLATE_TARGETS)
all: translate-jit-all translate-no-jit
translate-jit: slipy-c
translate-no-jit: slipy-c-nojit
# JIT-enabled binary with the minimark GC.
slipy-c: $(PYFILES)
	$(RPYTHON) -Ojit --gc=minimark targetslipy.py
# Plain translation without the JIT, for comparison runs.
slipy-c-nojit: $(PYFILES)
	$(RPYTHON) targetslipy.py
from slipy.AST import *
from slipy.continuation import empty_continuation
from slipy.environment import *
from slipy.exceptions import EvaluationFinished
from rpython.rlib import jit
def get_printable_location(ast, prev):
    # Human-readable label for JIT logs; keyed on the green variables.
    return ast.to_string()
driver = jit.JitDriver(reds=["env", "cont"], greens=["ast", "prev"],
                       get_printable_location=get_printable_location)
def initialize_global_env():
    # Build the global frame sized to the native table and install every
    # native function at its pre-assigned offset (scope 0).
    from slipy.natives import native_dict
    env = Env(len(native_dict))
    for sym, tup in native_dict.iteritems():
        offset, native = tup
        env.set_var(0, offset, native)
    # return env
    set_global_env(env)
def return_value_direct(value, env, cont):
    # Hand a computed value straight to a continuation.
    return cont.cont(value, env)
def _interpret(ast, env, cont):
    # Trampoline-style CEK loop: each eval() step returns the next
    # (ast, env, cont) triple.  Terminates via EvaluationFinished raised
    # by the empty continuation.  `prev` tracks the last Application seen
    # and serves as part of the JIT's green key.
    prev = ast
    while True:
        driver.jit_merge_point(ast=ast, prev=prev,
                               env=env, cont=cont)
        if isinstance(ast, Application):
            prev = ast
        # The branches are textually identical; dispatching on type()
        # presumably lets the JIT specialize each AST class's eval --
        # TODO confirm this is why the duplication exists.
        t = type(ast)
        if t is Let:
            ast, env, cont = ast.eval(env, cont)
        elif t is If:
            ast, env, cont = ast.eval(env, cont)
        elif t is Sequence:
            ast, env, cont = ast.eval(env, cont)
        else:
            ast, env, cont = ast.eval(env, cont)
        if isinstance(ast, Application):
            # Loop back-edge for the JIT: applications close loops.
            driver.can_enter_jit(ast=ast, prev=prev,
                                 env=env, cont=cont)
def interpret_with_env(ast, env):
    # Run ast to completion; the final value arrives via the
    # EvaluationFinished exception thrown by the empty continuation.
    try:
        cont = empty_continuation
        _interpret(ast, env, cont)
    except EvaluationFinished, e:
        return e.value
def interpret_with_global(ast):
    # Convenience wrapper: evaluate in the global environment.
    return interpret_with_env(ast, get_global_env())
<file_sep>/slipy/values.py
from rpython.rlib import jit
from slipy.exceptions import *
from slipy.continuation import *
# Interning table for W_Symbol (symbol text -> singleton instance).
_symbol_pool = {}
class W_SlipObject(object):
    # Root of the Slip value hierarchy.  to_string is the written form,
    # to_display the human-readable form; both default to __str__.
    def equal(self, obj):
        # Default equality is identity.
        return self is obj
    def __str__(self):
        return "<W_SlipObject>"
    def to_string(self):
        return self.__str__()
    def to_display(self):
        return self.__str__()
class W_Pair(W_SlipObject):
    # Mutable cons cell.
    # TODO: implement to_display
    def __init__(self, car, cdr):
        self._car = car
        self._cdr = cdr
    def car(self):
        return self._car
    def cdr(self):
        return self._cdr
    def set_car(self, val):
        self._car = val
    def set_cdr(self, val):
        self._cdr = val
    def _to_lstring(self):
        # Render the spine without outer parens; improper tails use
        # dotted-pair notation.
        car = self._car.to_string()
        cdr = self._cdr
        if isinstance(cdr, W_Pair):
            return "%s %s" % (car, cdr._to_lstring())
        elif cdr is w_empty:
            return car
        else:
            return "%s . %s" % (car, cdr.to_string())
    def __str__(self):
        # TODO: fix if quote is first symbol
        return "(%s)" % self._to_lstring()
class W_Vector(W_SlipObject):
    # Fixed-length mutable vector; length is immutable for the JIT.
    _immutable_fields_ = ["len"]
    def __init__(self, values, length):
        self._values = values
        self.len = length
    @staticmethod
    @jit.unroll_safe
    def make(length, val):
        # (make-vector n val): n shared references to val.
        vals = [val] * length
        return W_Vector(vals, length)
    def ref(self, idx):
        # NOTE(review): only the upper bound is checked; a negative idx
        # would index from the end Python-style -- confirm callers
        # guarantee idx >= 0.
        if idx >= self.len:
            raise SlipException("index out of bounds")
        return self._values[idx]
    def set(self, idx, val):
        if idx >= self.len:
            raise SlipException("index out of bounds")
        self._values[idx] = val
    def length(self):
        return self.len
    def __str__(self):
        vals = [None] * self.len
        for i, val in enumerate(self._values):
            vals[i] = val.to_string()
        vals = " ".join(vals)
        return "(vector %s)" % vals
class W_Null(W_SlipObject):
    # The empty list; a singleton (w_empty) defined at module bottom.
    def __str__(self):
        return "()"
class W_Number(W_SlipObject):
    # Abstract numeric base.  Subclasses implement double dispatch so
    # int/float combinations coerce correctly.
    is_int = is_float = False
    def add(self, other):
        raise Exception("abstract method")
    def sub(self, other):
        raise Exception("abstract method")
    def mul(self, other):
        raise Exception("abstract method")
    def div(self, other):
        raise Exception("abstract method")
    def is_eq(self, other):
        raise Exception("abstract method")
    def lt(self, other):
        raise Exception("abstract method")
    def gt(self, other):
        raise Exception("abstract method")
    def le(self, other):
        raise Exception("abstract method")
    def ge(self, other):
        raise Exception("abstract method")
class W_Integer(W_Number):
    # Immutable boxed integer.  Mixed int/float arithmetic promotes to
    # W_Float; comparisons against non-integers flip the dispatch to the
    # other operand (lt defers to other.gt, etc.).
    _immutable_fields_ = ["_val"]
    is_int = True
    def __init__(self, value):
        self._val = int(value)
    def value(self):
        return self._val
    def __str__(self):
        return str(self._val)
    def add(self, other):
        if isinstance(other, W_Float):
            return W_Float(float(self._val) + other.value())
        else:
            return W_Integer(self._val + other.value())
    def sub(self, other):
        if isinstance(other, W_Float):
            return W_Float(float(self._val) - other.value())
        else:
            return W_Integer(self._val - other.value())
    def mul(self, other):
        if isinstance(other, W_Float):
            return W_Float(float(self._val) * other.value())
        else:
            return W_Integer(self._val * other.value())
    def div(self, other):
        # int/int uses Python 2 floor division.
        if isinstance(other, W_Float):
            return W_Float(float(self._val) / other.value())
        else:
            return W_Integer(self._val / other.value())
    def is_eq(self, other):
        if isinstance(other, W_Integer):
            return self._val == other.value()
        else:
            return other.is_eq(self)
    def lt(self, other):
        if isinstance(other, W_Integer):
            return self._val < other.value()
        else:
            # Reverse the comparison when dispatching to the other side.
            return other.gt(self)
    def gt(self, other):
        if isinstance(other, W_Integer):
            return self._val > other.value()
        else:
            return other.lt(self)
    def le(self, other):
        if isinstance(other, W_Integer):
            return self._val <= other.value()
        else:
            return other.ge(self)
    def ge(self, other):
        if isinstance(other, W_Integer):
            return self._val >= other.value()
        else:
            return other.le(self)
class W_Float(W_Number):
    # Immutable boxed float; integer operands are coerced to float.
    _immutable_fields_ = ["_val"]
    is_float = True
    def __init__(self, value):
        self._val = float(value)
    def value(self):
        return self._val
    def __str__(self):
        return str(self._val)
    def add(self, other):
        if isinstance(other, W_Integer):
            return W_Float(self._val + float(other.value()))
        else:
            return W_Float(self._val + other.value())
    def sub(self, other):
        if isinstance(other, W_Integer):
            return W_Float(self._val - float(other.value()))
        else:
            return W_Float(self._val - other.value())
    def mul(self, other):
        if isinstance(other, W_Integer):
            return W_Float(self._val * float(other.value()))
        else:
            return W_Float(self._val * other.value())
    def div(self, other):
        if isinstance(other, W_Integer):
            return W_Float(self._val / float(other.value()))
        else:
            return W_Float(self._val / other.value())
    def is_eq(self, other):
        # Float/int mixed equality compares as floats.
        if isinstance(other, W_Float):
            return self._val == other.value()
        else:
            return self._val == float(other.value())
    def lt(self, other):
        if isinstance(other, W_Float):
            return self._val < other.value()
        else:
            return self._val < float(other.value())
    def gt(self, other):
        if isinstance(other, W_Float):
            return self._val > other.value()
        else:
            return self._val > float(other.value())
    def le(self, other):
        if isinstance(other, W_Float):
            return self._val <= other.value()
        else:
            return self._val <= float(other.value())
    def ge(self, other):
        if isinstance(other, W_Float):
            return self._val >= other.value()
        else:
            return self._val >= float(other.value())
class W_Boolean(W_SlipObject):
    # Boolean wrapper; only two instances exist (w_true / w_false), so
    # identity comparison is sufficient.
    _immutable_fields_ = ["_value"]
    def __init__(self, value):
        self._value = value
    @staticmethod
    def from_value(value):
        # Map any Python truth value onto the two singletons.
        return w_true if value else w_false
    def __str__(self):
        return "#t" if self._value else "#f"
class W_Symbol(W_SlipObject):
    # Interned symbol: from_string guarantees one instance per spelling,
    # so symbols compare by identity.
    _immutable_fields_ = ["_str"]
    def __init__(self, str):
        self._str = str
    @staticmethod
    def from_string(str):
        if str in _symbol_pool:
            return _symbol_pool[str]
        else:
            sym = W_Symbol(str)
            _symbol_pool[str] = sym
            return sym
    def __str__(self):
        return self._str
class W_String(W_SlipObject):
    # String value: written form is quoted, display form is raw.
    def __init__(self, str):
        self._str = str
    def __str__(self):
        return "\"%s\"" % self._str
    def to_display(self):
        return self._str
class W_Callable(W_SlipObject):
    # Abstract base for anything applicable: closures, natives,
    # reified continuations.  call() returns the next (ast, env, cont).
    def call(self, args, env, cont):
        raise Exception("abstract base class")
class W_NativeFunction(W_Callable):
    # Wraps an RPython-level function as a Slip callable.
    # BUGFIX: _immutable_fields_ listed "func", but the attribute is
    # named "_func", so the RPython immutability hint never applied.
    _immutable_fields_ = ["_func"]
    def __init__(self, func):
        self._func = func
    def call(self, args, env, cont):
        return self._func(args, env, cont)
    def __str__(self):
        return "#<native>"
class W_Closure(W_Callable):
    # User lambda: captured env + body (wrapped in a Sequence) + the
    # parameter symbols (args) and internal definitions (vars).
    _immutable_fields_ = ["args[*]", "vars[*]", "env", "body"]
    def __init__(self, args, vars, env, body):
        from slipy.AST import Sequence
        self.args = args
        self.env = env
        self.body = Sequence(body)
        self.vars = vars
    @jit.unroll_safe
    def call(self, args, env, cont):
        from slipy.environment import Env
        if len(args) != len(self.args):
            raise SlipException("Incorrect length of argument list")
        # Frame holds params first, then slots for internal defines
        # (which stay w_undefined until the body assigns them).
        new_env = Env(len(args)+len(self.vars), previous=self.env)
        for i, val in enumerate(args):
            new_env.set_var(new_env.scope, i, val)
        return self.body, new_env, cont
    def __str__(self):
        return "#<closure>"
class W_Continuation(W_Callable):
    # First-class continuation produced by call/cc.  Invoking it discards
    # the current continuation and resumes the captured one.
    _immutable_fields_ = ["_cont"]
    def __init__(self, cont):
        self._cont = cont
    def call(self, args, env, cont):
        from slipy.interpreter import return_value_direct
        # TODO: deal with too much args etc
        if args:
            return return_value_direct(args[0], env, self._cont)
        else:
            # Invoked with no arguments: resume with void.
            return return_value_direct(w_void, env, self._cont)
    def __str__(self):
        return "#<continuation>"
class W_Undefined(W_SlipObject):
    # Placeholder for a binding that has not been initialized yet.
    def __str__(self):
        return "#<undefined>"
class W_Void(W_SlipObject):
    # The "no useful result" value returned by side-effecting natives.
    def __str__(self):
        return "#<void>"
# Default values
# Singletons; all code compares against these by identity.
w_empty = W_Null()
w_true = W_Boolean(True)
w_false = W_Boolean(False)
w_undefined = W_Undefined()
w_void = W_Void()
def is_true(val):
    # Scheme truthiness: every value is true except the #f singleton.
    if isinstance(val, W_Boolean) and val is w_false:
        return False
    return True
@jit.unroll_safe
def list_from_values(vals):
    # Build a proper Slip list from a Python list, consing right-to-left.
    if len(vals) == 0:
        return w_empty
    else:
        cur = w_empty
        for i in range(len(vals)-1, -1, -1):
            cur = W_Pair(vals[i], cur)
        return cur
def values_from_list(pair):
    # Flatten a proper Slip list into a Python list; raises on an
    # improper (non-w_empty-terminated) spine.
    result = []
    curr = pair
    while isinstance(curr, W_Pair):
        result.append(curr.car())
        curr = curr.cdr()
    if curr is w_empty:
        return result
    else:
        raise SlipException("Improper list")
from slipy.exceptions import EvaluationFinished
from slipy.values import *
class Continuation(object):
    # Abstract continuation frame; a linked list via each subclass's
    # `prev` field.
    def cont(self, val, env):
        # Should return ast, env, cont
        raise Exception("abstract base class")
    def depth(self):
        raise Exception("abstract base class")
class EmptyContinuation(Continuation):
    # Bottom of every continuation chain: receiving a value here ends
    # evaluation by raising EvaluationFinished (caught in interpreter.py).
    def cont(self, val, env):
        raise EvaluationFinished(val)
    def depth(self):
        return 1
empty_continuation = EmptyContinuation()
class LetContinuation(Continuation):
    # Receives the value of the i-th let initializer, stores it in the
    # let's frame, and resumes evaluation of initializer i+1.
    # BUGFIX: _immutable_fields_ listed "cont" -- which is the method
    # name, not a field -- and omitted the real field "env".
    _immutable_fields_ = ["let", "i", "prev", "env"]
    def __init__(self, let, i, env, cont):
        self.let = let
        self.i = i
        self.prev = cont
        self.env = env
    def cont(self, val, env):
        self.env.set_var(self.env.scope, self.i, val)
        return self.let.make_cont(self.env, self.prev, self.i + 1)
    def depth(self):
        return 1 + self.prev.depth()
class SequenceContinuation(Continuation):
    # Drives a (begin ...) body: the incoming value of expression i-1 is
    # discarded and expression i is scheduled next.
    _immutable_fields_ = ["seq", "env", "prev", "i"]
    def __init__(self, seq, i, env, prev):
        self.seq = seq
        self.prev = prev
        self.env = env
        self.i = i
    def cont(self, val, env):
        # `val` intentionally ignored -- only the last expression's
        # value survives a sequence.
        return self.seq.make_cont(self.env, self.prev, self.i)
    def depth(self):
        return 1 + self.prev.depth()
class MapStartContinuation(Continuation):
    # After fn has been applied to the current car, map the remaining
    # list; MapBuildContinuation will cons this element's result on.
    _immutable_fields_ = ["fn", "list", "prev"]
    def __init__(self, fn, list, prev):
        self.fn = fn
        self.list = list
        self.prev = prev
    def cont(self, val, env):
        from slipy.natives import do_map
        cont = MapBuildContinuation(val, self.prev)
        return do_map(self.fn, self.list, env, cont)
    def depth(self):
        return 1 + self.prev.depth()
class MapBuildContinuation(Continuation):
    # Holds one mapped element; when the mapped tail arrives, cons the
    # element onto it and pass the pair upward.
    _immutable_fields_ = ["val", "prev"]
    def __init__(self, val, prev):
        self.val = val
        self.prev = prev
    def cont(self, val, env):
        from slipy.interpreter import return_value_direct
        return return_value_direct(W_Pair(self.val, val), env, self.prev)
    def depth(self):
        return 1 + self.prev.depth()
from slipy.AST import *
from slipy.natives import native_dict
from slipy.values import *
def _parse_num(obj):
    # JSON number node -> W_Integer or W_Float, decided by the 'int' flag.
    assert obj['val'].is_num
    assert obj['int'].is_bool
    if obj['int'].bool_value():
        return W_Integer(obj['val'].num_value())
    else:
        return W_Float(obj['val'].num_value())
def _parse_value(val):
    # JSON literal node -> Slip value, dispatched on its 'type' tag.
    assert val['type'].is_string
    type = val['type'].string_value()
    if type == 'number':
        return _parse_num(val)
    elif type == 'bool':
        assert val['val'].is_bool
        return W_Boolean.from_value(val['val'].bool_value())
    elif type == 'symbol':
        assert val['val'].is_string
        return W_Symbol.from_string(val['val'].string_value())
    elif type == 'string':
        assert val['val'].is_string
        return W_String(val['val'].string_value())
    else:
        raise Exception("_parse_value exception")
class _ListBuilder(object):
    # Recursively turns a JSON array (possibly nested) into a proper
    # Slip list of parsed values.
    @staticmethod
    def _get_value(val):
        if val.is_list:
            return _ListBuilder._helper(val.list_value())
        else:
            assert val.is_object
            return _parse_value(val.object_value())
    @staticmethod
    def _helper(vals):
        if not vals:
            return w_empty
        car = _ListBuilder._get_value(vals[0])
        cdr = _ListBuilder._helper(vals[1:])
        return W_Pair(car, cdr)
    @staticmethod
    def build_list(vals):
        return _ListBuilder._helper(vals)
def _parse_list(values):
    # Thin wrapper around _ListBuilder for quoted-list nodes.
    return _ListBuilder.build_list(values)
def _vars_to_syms(vars):
    # JSON array of strings -> list of interned W_Symbols.
    syms = [None] * len(vars)
    for i, var in enumerate(vars):
        assert var.is_string
        syms[i] = W_Symbol.from_string(var.string_value())
    return syms
def _parse_exp_list(lst):
    # JSON array of expression objects -> list of AST nodes.
    exprs = []
    for exp in lst:
        assert exp.is_object
        exprs.append(_parse_dict(exp.object_value()))
    return exprs
def _parse_dict(dict):
    # Main expression parser: one JSON object (from the Racket expander)
    # -> one AST node, dispatched on the 'type' tag.
    assert dict['type'].is_string
    type = dict['type'].string_value()
    if type == 'lambda':
        # TODO: Support lambda's like (lambda x x)
        assert dict['params'].is_list
        assert dict['vars'].is_list
        assert dict['body'].is_list
        args = _vars_to_syms(dict['params'].list_value())
        vars = _vars_to_syms(dict['vars'].list_value())
        body = _parse_exp_list(dict['body'].list_value())
        return Lambda(args, vars, body)
    elif type == 'begin':
        assert dict['body'].is_list
        body = _parse_exp_list(dict['body'].list_value())
        if len(body) == 0:
            raise SlipException("Empty begin form is not allowed!")
        return Sequence(body)
    elif type == 'quoted-list':
        assert dict['val'].is_list
        list = _parse_list(dict['val'].list_value())
        return Quote(list)
    elif type == 'symbol':
        # A bare symbol node in expression position is a quoted symbol;
        # variable references come through as lex-ref / nat-ref.
        assert dict['val'].is_string
        obj = W_Symbol.from_string(dict['val'].string_value())
        return Quote(obj)
    elif type == 'number':
        obj = _parse_num(dict)
        return Quote(obj)
    elif type == 'bool':
        assert dict['val'].is_bool
        obj = W_Boolean.from_value(dict['val'].bool_value())
        return Quote(obj)
    elif type == 'string':
        assert dict['val'].is_string
        obj = W_String(dict['val'].string_value())
        return Quote(obj)
    elif type == 'nat-ref':
        # Reference to a native: scope 0, offset from the native table.
        assert dict['symbol'].is_string
        symbol = W_Symbol.from_string(dict['symbol'].string_value())
        offset, _ = native_dict[symbol]
        return VarRef(symbol, 0, offset)
    elif type == 'lex-ref':
        # Lexical reference pre-resolved to (scope, offset) coordinates.
        assert dict['symbol'].is_string
        assert dict['scope'].is_num
        assert dict['offset'].is_num
        symbol = W_Symbol.from_string(dict['symbol'].string_value())
        scope = dict['scope'].num_value()
        offset = dict['offset'].num_value()
        return VarRef(symbol, scope, offset)
    elif type == 'if':
        assert dict['test'].is_object
        assert dict['consequent'].is_object
        assert dict['alternative'].is_object
        condition = _parse_dict(dict['test'].object_value())
        consequent = _parse_dict(dict['consequent'].object_value())
        alternative = _parse_dict(dict['alternative'].object_value())
        return If(condition, consequent, alternative)
    elif type == 'set':
        # set! reuses the target VarRef's resolved coordinates.
        assert dict['target'].is_object
        assert dict['val'].is_object
        target = _parse_dict(dict['target'].object_value())
        val = _parse_dict(dict['val'].object_value())
        assert isinstance(target, VarRef)
        return SetBang(target.sym, target.scope, target.offset, val)
    elif type == 'apl':
        assert dict['operator'].is_object
        assert dict['operands'].is_list
        operator = _parse_dict(dict['operator'].object_value())
        operands = _parse_exp_list(dict['operands'].list_value())
        return Application(operator, operands)
    elif type == 'let':
        assert dict['vars'].is_list
        assert dict['vals'].is_list
        assert dict['body'].is_list
        assert dict['decls'].is_list
        vars = _vars_to_syms(dict['vars'].list_value())
        decls = _vars_to_syms(dict['decls'].list_value())
        body = _parse_exp_list(dict['body'].list_value())
        vals = _parse_exp_list(dict['vals'].list_value())
        return Let(vars, vals, decls, body)
    else:
        raise Exception("Invalid key")
def _parse_program(program):
    # Top-level JSON object {vars, exps} -> Program AST node.
    assert program.is_object
    program = program.object_value()
    assert program['vars'].is_list
    assert program['exps'].is_list
    vars = _vars_to_syms(program['vars'].list_value())
    exprs = _parse_exp_list(program['exps'].list_value())
    return Program(vars, exprs)
def parse_ast(json):
    # Public entry point for expanded-program JSON.
    return _parse_program(json)
def parse_data(data):
    # Public entry point for (read) results: plain data, not expressions.
    assert data.is_object
    data = data.object_value()
    assert data['type'].is_string
    if data['type'].string_value() == 'quoted-list':
        assert data['val'].is_list
        return _parse_list(data['val'].list_value())
    else:
        return _parse_value(data)
import os
# http://python-forum.org/viewtopic.php?t=6185&p=8031
# http://python-forum.org/viewtopic.php?t=6185&p=8031
def raw_input(prompt, c=31366):
    # RPython-compatible replacement for the raw_input builtin: write the
    # prompt to fd 1, read at most c bytes from fd 0, strip the newline.
    os.write(1, prompt)
    res = os.read(0, c)
    return res.rstrip('\n')
def write(text):
    # Unbuffered write to stdout.
    os.write(1, text)
# RPython has no zip, unfortunately
def zip(l1, l2):
    # Pair elements of l1 and l2 positionally, stopping at the shorter
    # list (same contract as the builtin, but returning a list).
    # PERF: the original recursed with list slicing, costing O(n^2) time
    # and O(n) stack depth; this iterative version is O(n).
    n = len(l1)
    if len(l2) < n:
        n = len(l2)
    result = []
    for i in range(n):
        result.append((l1[i], l2[i]))
    return result
from rpython.rlib.objectmodel import we_are_translated
#
# Standard error messages
#
# Shared message templates; natives fill in the operation name via %.
arg_count_error = "%s: the correct amount of arguments was not supplied"
arg_types_error = "%s: one or more operators have an incorrect type"
#
# Exceptions for use in Slipy
#
class SlipException(Exception):
    # Runtime error raised by natives and the interpreter.  The super()
    # call is skipped when translated because RPython exceptions do not
    # support it.
    def __init__(self, msg):
        if not we_are_translated():
            super(SlipException, self).__init__(msg)
        self.message = msg
class EvaluationFinished(Exception):
    # Control-flow exception: the empty continuation raises it to carry
    # the program's final value out of the trampoline loop.
    def __init__(self, val):
        if not we_are_translated():
            msg = "Evaluation finished with: %s" % val.to_string()
            super(EvaluationFinished, self).__init__(msg)
        self.value = val
|
c47016e1bc2567218aeee58d1f8764ec8acc182d
|
[
"Markdown",
"Python",
"Makefile",
"Shell"
] | 15 |
Python
|
nvgeele/slipy
|
d7a66b2c29b57a4b2298d8a0123055e8168e043b
|
551508769f962ba1ecfcb0e122aec30b12a03891
|
refs/heads/master
|
<repo_name>zaenalmusthofa86/Praktikum_5<file_sep>/README.md
# Praktikum_5
# Latihan 1 (Mencari nilai terbesar dari sejumlah bilangan yang diinputkan dengan fungsi loop Do While)
```
-Mendeklarasikan variable int a,max=0; sebagai variable input
-Memasukkan bilangannya dengan perintah perulangan do
-Menentukan bilangan yang terbesar dengan rumus if (a>max)
max=a;
-Memeriksa bilangan yang kita masukkan dengan perintah while (a!=0);
-Memasukkan angka 0,maka perintah perulangan akan berhenti
-Menampilkan hasil nilai terbesar kelayar dengan kode: cout << max;
-Berikut kode lengkapnya:
#include <iostream>
using namespace std;
int main()
{
int a,max=0;
do
{
cout << "Masukkan bilangannya :";
cin >> a;
if (a>max)
max=a;
}
while (a!=0);
cout << max;
return 0;
}
-Berikut Pseudocodenya:
1. a= ... a <--
2. while (a!=0)
3. Do.. if (a>max) (max=a)
4. WRITE (max)
```
-Berikut Flowchartnya:

-Berikut Screenshotnya:

# Latihan 2 (Contoh penggunaan program logika operator OR [||])
```
-Mendeklarasikan variable int a,b,c; sebagai variable input
-Memasukkan bilangannya dengan perintah:
cout << "Masukkan nilai pertama :";
cin >> a;
cout << "Masukkan nilai kedua :";
cin >> b;
cout << "Masukkan nilai ketiga :";
cin >> c;
-Menentukan hasil nilai BENAR atau SALAH dengan rumus: if ( (a+b==c) || (a+c==b))
*Pernyataan BENAR apabila salah satu bilangan merupakan jumlah dari dua bilangan yang lain
*Pernyataan SALAH apabila tidak ada bilangan yang merupakan jumlah dari dua bilangan yang lain
-Menampilkan hasil nilai BENAR atau SALAH kelayar dengan kode:
{
cout << "BENAR";
}
else if ( (b+c==a))
{
cout << "BENAR";
}
else
{
cout << "SALAH";
}
-Berikut kode lengkapnya:
#include <iostream>
using namespace std;
int main()
{
int a,b,c;
cout << "Masukkan nilai pertama :";
cin >> a;
cout << "Masukkan nilai kedua :";
cin >> b;
cout << "Masukkan nilai ketiga :";
cin >> c;
if ((a+b==c) || (a+c==b))
{
cout << "BENAR";
}
else if ((b+c==a))
{
cout << "BENAR";
}
else
{
cout << "SALAH";
}
return 0;
}
-Berikut Pseudocodenya:
1. a =.... a <--
2. b =.... b <--
3. c =.... c <--
4. if((a+b==c || a+c==b))
5. WRITE (BENAR)
6. else if ((b+c==a))
7. WRITE (BENAR)
8. else
9. WRITE(SALAH)
```
-Berikut Flowchartnya:

-Berikut Screenshotnya:
1.Pernyataan BENAR(1)

2.Pernyataan BENAR(2)

3.Pernyataan BENAR(3)

4.Pernyataan SALAH
<file_sep>/Latihan1/main.cpp
#include <iostream>
using namespace std;
// Reads integers until 0 is entered and prints the largest one seen.
int main()
{
// Running maximum starts at 0, so an all-negative input prints 0.
int a,max=0;
do
{
cout << "Masukkan bilangannya :";
cin >> a;
// Keep the largest value entered so far.
if (a>max)
max=a;
}
while (a!=0);
// Entering 0 ends the loop; print the maximum.
cout << max;
return 0;
}
<file_sep>/Latihan2/main.cpp
#include <iostream>
using namespace std;
// Reads three integers and prints BENAR ("true") when one of them is
// the sum of the other two, SALAH ("false") otherwise.
int main()
{
int a,b,c;
cout << "Masukkan nilai pertama :";
cin >> a;
cout << "Masukkan nilai kedua :";
cin >> b;
cout << "Masukkan nilai ketiga :";
cin >> c;
// Either c or b is the sum of the other two inputs.
if ( (a+b==c) || (a+c==b))
{
cout << "BENAR";
}
// Remaining case: a is the sum of b and c.
else if ( (b+c==a))
{
cout << "BENAR";
}
else
{
cout << "SALAH";
}
return 0;
}
|
d06ac6eaa91d5b6ad735c7db59069975a56d0097
|
[
"Markdown",
"C++"
] | 3 |
Markdown
|
zaenalmusthofa86/Praktikum_5
|
0004accf94377ecc36936ba19dbff67867896acb
|
c97b9d63cccef81a60508c1cf1b9eb125e7a4a09
|
refs/heads/master
|
<repo_name>mfikria/sirclo-test<file_sep>/cart/solution-cart.js
// Simple shopping cart: product codes mapped to accumulated quantities.
class Cart {
    constructor() {
        // productCode -> total quantity added so far
        this.products = new Map();
    }
    // Add `quantity` units of a product; repeated adds for the same
    // code accumulate. (Was an explicit has()/get() branch — Map handles
    // the missing-key case with a default of 0.)
    addProduct(productCode, quantity) {
        const current = this.products.get(productCode) || 0;
        this.products.set(productCode, current + quantity);
    }
    // Remove a product entirely. Removing an unknown code is a no-op:
    // Map.delete is safe on a missing key, so the previous has() guard
    // and the dead commented-out error branch were removed.
    removeProduct(productCode) {
        this.products.delete(productCode);
    }
    // Print each product as "<code> (<quantity>)".
    showCart() {
        this.products.forEach(function(quantity, productCode) {
            console.log(productCode + " (" + quantity + ")");
        });
    }
}
// Initialize cart
var cart = new Cart();
// Test case 1
// Quantities for the same product code accumulate (1 + 3 = 4 below).
cart.addProduct("Baju Merah Mantap", 1);
cart.addProduct("Baju Merah Mantap", 3);
cart.addProduct("Bukuku", 3);
// Added and then removed: "Bukuku" must not appear in the output.
cart.removeProduct("Bukuku");
cart.addProduct("Singlet Hijau", 1);
// Removing a product that was never added is silently ignored.
cart.removeProduct("ProdukBohongan");
cart.showCart();<file_sep>/climate-app/index.js
// Arithmetic mean of an array of numbers.
// Returns NaN for an empty array (reduce without an initial value
// would otherwise throw). Leftover debug console.log removed.
function getAverage(arr) {
    if (arr.length === 0) return NaN;
    var sum = arr.reduce(function(a, b) { return a + b; });
    return sum / arr.length;
}
// Build the <thead> markup for the forecast table, with the city name
// in the first column header.
function generateTableHeader(cityName) {
    return `<thead><tr><th>${cityName}</th><th>Temperature</th><th>Variance</th></tr></thead>`;
}
// Build the bold "Average" footer row: temperature with 0 decimals,
// variance with 2 decimals, both suffixed with 'C'.
function generateTableFooter(temperature, variance) {
    const temp = temperature.toFixed(0);
    const vari = variance.toFixed(2);
    return `<tr><td><strong>Average</strong></td><td><strong>${temp}C</strong></td><td><strong>${vari}C</strong></td></tr>`;
}
// Build one data row of the forecast table: date, temperature
// (0 decimals) and variance (2 decimals), temperatures suffixed 'C'.
function generateTableData(date, temperature, variance) {
    return `<tr><td>${date}</td><td>${temperature.toFixed(0)}C</td><td>${variance.toFixed(2)}C</td></tr>`;
}
function fetchData(e) {
var cityId = document.forms['climate-form']['city-list'].value;
var dates = new Array();
var temperatures = new Array();
var variances = new Array();
var averages = new Array();
var data = new Array();
fetch('https://api.openweathermap.org/data/2.5/forecast/daily?q=' + cityId + '&mode=json&units=metric&cnt=5&APPID=481e3bc28e5264e5607c2b65b449bfc1', {method: 'GET'}).then(function(response) {
return response.json();
}).then(function(response) {
var cityName = response['city']['name'];
response['list'].forEach(function(condition){
// Collect temperature data and fix the format
temperatures.push(parseFloat(condition['temp']['day']));
// Collect variance data and fix the format
variances.push(parseFloat(condition['temp']['max'] - condition['temp']['min']));
// Collect date data and fix the format
var date = new Date(condition['dt'] * 1000);
dates.push(date.toISOString().substring(0, 10));
});
// debug
console.dir(temperatures);
console.dir(variances);
console.dir(dates);
// Generate table data
var header = generateTableHeader(cityName);
var footer = generateTableFooter(getAverage(temperatures), getAverage(variances));
for(i = 0; i < temperatures.length; i++) {
data.push(generateTableData(dates[i], temperatures[i], variances[i]));
}
document.getElementById('result-table').innerHTML = header + data.join('') + footer;
});
// prevent page to reload
return false;
}<file_sep>/fivaa/solution-fivaa.js
// Print `rowNum` rows of the pattern and return them as an array.
// For i counting down from rowNum-1 to 0, a row is the digit i twice,
// followed by (i+1) repetitions of the digit i+2.
// e.g. fivaa(5) prints 4466666, 335555, 22444, 1133, 002.
// Fixes: loop index `i` was an implicit global; rows are now also
// returned (backward compatible — existing callers ignore the result).
function fivaa(rowNum) {
    const rows = [];
    for (let i = rowNum - 1; i >= 0; i--) {
        // Part1 is the first two digits, Part2 is the rest.
        const part1 = i.toString() + i.toString();
        const part2 = new Array(i + 1).fill(i + 2).join('');
        const row = part1 + part2;
        console.log(row);
        rows.push(row);
    }
    return rows;
}
// Input is row number
// Prints the 5-row pattern: 4466666, 335555, 22444, 1133, 002.
fivaa(5);
|
d96a617822d9385ca104161fddfa87314815dddf
|
[
"JavaScript"
] | 3 |
JavaScript
|
mfikria/sirclo-test
|
bddc25d95641ff085905110f7730ad5890ab8d41
|
415846b008927e1a6683bdb065fbdbbee36b78bc
|
refs/heads/master
|
<file_sep>/*
1:
x=5-3
x=2
Trenger: Slå sammen ax^n og bx^n i sum, også n=0
Kan lages generell (slår sammen alt som kan slås sammen)
"🠞🠜 merge terms"
Drag-and-drop: på hverandre
2:
x+3=5
x+3-3=5-3
x=2
Trenger: Trekke fra ledd på begge sider.
Enten velge ledd, eller skrive inn hva som skal trekkes fra.
"|− subtract from both sides of equation"
Drag-and-drop: dra over på andre siden
3:
x+3=5
x=5-3
x=2
Trenger: Flytte ledd til andre siden av ligningen
Enten velge ledd, eller skrive inn (men hva om flere?)
"↻ move term"
Drag-and-drop: dra over på andre siden (setting endret)
4:
5x-7=4x
5x-4x-7=0
x-7=
x=7
Trenger: Slå sammen ax^n og bx^n i sum, også n=0
Kan lages generell (slår sammen alt som kan slås sammen)
"🠞🠜 merge terms"
Drag-and-drop: dra over hverandre
5:
5x-1=2x
5x-2x=1
3x=1
x=1/3
Trenger: Dele begge sider av ligningen
Kan velge eller skrive inn.
"|÷ divide both sides of equation by"
Drag-and-drop: dra under ligningen
6:
5x-3=2x
5x-2x=3
3x=3
x=3/3
Trenger: Forkorte (like faktorer)
Kan lages generell (forkorter alt som kan forkortes)
"⤪ Reduce fraction"
Drag-and-drop: dra del av teller mot del av nevner - eller motsatt
7:
9x+2=3x+6
6x=4
x=4/6
x=2*2/2*3
x=2/3
Trenger: Primtallsfaktorisere
Velge eller skrive inn. (Kan lages generell, ved at alle tall primtallsfaktoriseres)
"⤶⤷ Prime factorize"
Drag-and-drop: Dra tall over ligningen
Ledd:
- Primtallsfaktorisere
- Slå sammen ax^n og bx^n i sum, også n=0
- Slå sammen like ledd til 2*ledd
- Trekke fra ledd på begge sider.
- Flytte ledd til andre siden av ligningen
- Trekke ut felles faktor fra sum
- Dele begge sider av ligningen
Brøk
- Bytte fortegn (gange teller og nevner med -1)
- Gjøre om til desimaltall
- Pluss, minus, gange, dele
- Trekke ut faktor fra teller (også -1?)
- Gange hele ligningen med det som er i nevneren
- Forkorte (like faktorer)
Faktor
- Slå sammen konstantfaktorer i samme produkt
- Lage kvadrat av to like faktorer
- Gange ut sum med de andre faktorene (løse ut parentes)
Ligning:
- Bytte fortegn (gange begge sider med -1)
Usikker på disse:
- ConvertComplexNumberToSumOfSimpleNumbersOperation
- ConvertComplexNumberInFactorToSumOfSimpleNumbersOperation
- ConvertComplexNumberToSumOfSimpleNumbersOperation
*/<file_sep>const model = {
level: 1,
mathText: 'x=1+2',
ownExercise: '',
showVideos: true,
history: {
items: [],
index: 0,
},
steps: {
selectOneTerm: 'Velg et ledd.',
selectFactorInNumerator: 'Velg en faktor i telleren.',
selectFactorInDenominator: 'Velg en faktor i nevneren.',
selectTopLevelFactor: 'Velg et ledd eller en faktor i et ledd.',
selectNumber: 'Velg et tall.',
},
mathOperations: {},
youTubeVideoIds: [
'',
'4yY3GJ2VJR8',
'ppjutK7iwu8',
'kPK-rbW7Jy8',
'zAbQeidbWdc',
'rgdP8DK9cQ8',
'QejFqIPpos4',
],
};
model.mathOperations[mergeTerms.name] = {
steps: ['selectOneTerm', 'selectOneTerm'],
icon: '∑',//'⭨⭩\n•',
svg: {
path: 'M 3.34 4.7 h -3.2577 v -0.5264 l 1.8548 -1.8411 l -1.8273 -1.7391 v -0.5181 h 3.1226 v 0.4851 h -2.4557 l 1.7859 1.6564 v 0.1984 l -1.8355 1.7997 h 2.6128 z',
viewBox: {
width: 3.34,
height: 4.7,
}
},
description: 'Slå sammen ledd',
levels: {
first: 1,
}
};
model.mathOperations[subtractTermOnBothSides.name] = {
steps: ['selectOneTerm'],
icon: '|−',
svg: {
path: 'M 0.5796 5.4688 h -0.4796 v -5.3688 h 0.4796 z m 3 -2.6 h -1.7198 v -0.4989 h 1.7198 z',
viewBox: {
width: 3.5,
height: 5.5,
}
},
description: 'Trekke fra på begge sider av ligningen',
levels: {
first: 2,
last: 3,
}
};
model.mathOperations[moveTermToOtherSide.name] = {
steps: ['selectOneTerm'],
icon: '↷\n=',
svg: {
path: 'M 0.1 5.8787 q 0 -2.3974 1.6946 -4.0841 q 1.6946 -1.6946 4.0841 -1.6946 q 1.7805 0 3.1783 0.9371 q 1.3978 0.9293 2.1631 2.6316 l 1.1323 -1.3041 l -0.5076 3.8186 l -3.0533 -2.3427 q 1.3822 0.2968 1.718 0.125 l -0.2108 -0.4451 q -1.4915 -2.6551 -4.4121 -2.6551 q -2.0772 0 -3.5453 1.4681 q -1.4681 1.4681 -1.4681 3.5453 z m 8.7 2 h -6.1288 v -0.8268 h 6.1288 z m 0 2.1704 h -6.1288 v -0.8268 h 6.1288 z',
viewBox: {
width: 12.5,
height: 10.5,
}
},
description: 'Flytte ledd til den andre siden av ligningen',
levels: {
first: 4,
}
};
model.mathOperations[divideBothSides.name] = {
steps: ['selectTopLevelFactor'],
icon: '|÷',
svg: {
path: 'M 0.5796 5.4688 h -0.4796 v -5.3688 h 0.4796 z m 3 -2.6 h -1.7198 v -0.4989 h 1.7198 z m -0.6 -0.8 h -0.5 v -0.4989 h 0.5 z m 0 1.6 h -0.5 v -0.4989 h 0.5 z',
viewBox: {
width: 3.5,
height: 5.5,
}
},
description: 'Dele begge sider av ligningen',
levels: {
first: 5,
}
};
model.mathOperations[reduceFraction.name] = {
steps: ['selectFactorInNumerator', 'selectFactorInDenominator'],
icon: '/\n‒\n/',
svg: {
path: 'M 10 1 l -3.7052 9.6118 h -0.894 l 3.6897 -9.6118 z m 3 12 h -12 v -0.8268 h 12 z m -3 1.3 l -3.7052 9.6118 h -0.894 l 3.6897 -9.6118 z',
viewBox: {
width: 14,
height: 24,
}
},
description: 'Forkorte brøk',
levels: {
first: 5,
}
};
model.mathOperations[primeFactorize.name] = {
steps: ['selectNumber'],
icon: '□\n⭩⭨\n□×□',
svg: {
path: 'm 6.2 0.1 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m -4 10.5 v -2.7027 l 1.08 1.08 l 4.0566 -4.0566 l 0.5426 0.5426 l -4.0566 4.0566 l 1.08 1.08 z m 12 0 v -2.7027 l -1.08 1.08 l -4.0566 -4.0566 l -0.5426 0.5426 l 4.0566 4.0566 l -1.08 1.08 z m -14.4 0.4 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m 11.5 -0.376 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m -1.08 4.2 l -0.5736 0.5736 l -2.2531 -2.2738 l -2.2531 2.2738 l -0.5736 -0.5736 l 2.2738 -2.2531 l -2.2738 -2.2531 l 0.5736 -0.5736 l 2.2531 2.2738 l 2.2531 -2.2738 l 0.5736 0.5736 l -2.2738 2.2531 z',
viewBox: {
width: 17,
height: 17,
}
},
description: 'Primtallsfaktorisere',
levels: {
first: 6,
}
};
model.mathOperations[undo.name] = {
steps: [],
icon: '^⮪',
svg: {
path: 'm 0.1 2.2 l 2.0826 -2.0877 v 1.0697 h 2.7182 q 1.4624 0 2.0671 0.4651 q 0.4651 0.3514 0.6201 0.8268 q 0.1602 0.4703 0.1602 1.5038 v 1.0284 h -0.2532 v -0.1395 q 0 -0.9302 -0.4341 -1.2867 q -0.4289 -0.3617 -1.5451 -0.3617 h -3.3331 v 1.0697 z',
viewBox: {
width: 8,
height: 5,
}
},
description: 'Angre',
levels: {
first: 0,
}
};
model.mathOperations[redo.name] = {
steps: [],
icon: '^⮫',
svg: {
path: 'm 7.8 2.2 l -2.0877 2.0877 v -1.0697 h -3.328 q -1.1214 0 -1.5503 0.3617 q -0.4289 0.3566 -0.4289 1.2867 v 0.1395 h -0.2532 v -1.0284 q 0 -1.0335 0.155 -1.5038 q 0.1602 -0.4754 0.6201 -0.8268 q 0.6046 -0.4651 2.0671 -0.4651 h 2.7182 v -1.0697 z',
viewBox: {
width: 8,
height: 5,
}
},
description: 'Gjøre omigjen',
levels: {
first: 0,
}
};
<file_sep>const levelExerciseFunctions = [
// 0 - ferdig løste lignigner
() => createEquation([randomTerm(true, false)]),
// 1 - + slå sammen ledd
() => createEquation([randomTerm(true, false), randomTerm(false, false)]),
// 2 - + trekke fra på begge sider
() => createEquation([randomTerm(false, false)], [randomTerm(true, false)]),
// 3 - som før, men x-ledd også
createEquationWithXTermsWithDiffOfOne,
// 4 - flytte ledd istedenfor trekke fra på begge sider
createEquationWithXTermsWithDiffOfOne,
// 5 - + dele + forkorte
createEquationWithNoNeedForPrimeFactorization,
// 6 - primtallsfaktorisere
createEquationWithNeedForPrimeFactorization,
];
function createEquationWithNeedForPrimeFactorization() {
// ax=b
const commonFactorCount = 1;//randomNumberFromRange(1, 4);
const commonFactors = range(0, commonFactorCount).map(() => randomPrime());
const product = commonFactors.reduce((value, total) => total * value, 1);
const a = product * randomPrime();
const b = product * randomFromArray([2, 3, 5, 7].filter(n => n !== a));
const [a1, a2] = splitNumberInTwoRandomParts(a);
const [b1, b2] = splitNumberInTwoRandomParts(b);
return equationAxBequalsCxD(a1, -b1, -a2, b2);
}
function createEquationWithNoNeedForPrimeFactorization() {
// ax=b, hvor a og b er forskjellige primtall
const a = randomPrime();
const b = randomFromArray([2, 3, 5, 7].filter(n => n !== a));
const [a1, a2] = splitNumberInTwoRandomParts(a);
const [b1, b2] = splitNumberInTwoRandomParts(b);
return equationAxBequalsCxD(a1, -b1, -a2, b2);
}
function equationAxBequalsCxD(a, b, c, d) {
return randomOrderSum(a, b) + '=' + randomOrderSum(c, d);
}
function splitNumberInTwoRandomParts(n) {
const term = numberWithRandomSign(randomNumber());
return [term, n - term];
}
function createEquationWithXTermsWithDiffOfOne() {
const x1abs = randomNumberFromRange(2, 8);
const x2abs = x1abs + 1;
const x1 = randomBool() ? x1abs : -1 * x1abs;
const x2 = x1 > 0 ? x2abs : -1 * x2abs;
const c1abs = randomNumber();
const c2abs = randomNumber();
const c1 = randomBool() ? c1abs : -1 * c1abs;
const c2 = c1 > 0 ? c2abs : -1 * c2abs;
return equationAxBequalsCxD(x1, c1, x2, c2);
}
function numberWithRandomSign(number) {
return randomBool() ? number : -1 * number;
}
function randomOrderSum(x, c) {
const xTxt = x > 0 ? '+' + x : '' + x;
const cTxt = c > 0 ? '+' + c : '' + c;
const sum = randomBool()
? xTxt + '*x' + cTxt
: cTxt + xTxt + '*x';
return sum.startsWith('+') ? sum.substr(1) : sum;
}
function randomBool() {
return Math.random() < 0.5;
}
function createEquation(terms1, terms2) {
if (!terms2) return randomFlipOrNot('x=' + terms1.join(''));
return randomFlipOrNot('x' + terms1.join('') + '=' + terms2.join(''));
}
function randomTerm(isFirst, includeX) {
let txt = '';
txt += randomSign(isFirst ? '' : null);
txt += randomNumber();
return includeX ? txt + '*x' : txt;
}
function randomNumberFromRange(min, max) {
return min + Math.floor(Math.random() * (max - min));
}
function randomNumber() {
return randomNumberFromRange(1, 9)
}
function randomPrime() {
return [2, 3, 5, 7][randomNumberFromRange(0, 3)]
}
function randomFromArray(array) {
return array[randomNumberFromRange(0, array.length)];
}
function randomFlipOrNot(equation) {
if (Math.random() < 0.5) return equation;
const parts = equation.split('=');
return parts[1] + '=' + parts[0];
}
function randomSign(plusSign) {
return Math.random() < 0.5 ? '-' :
plusSign === null ? '+' :
plusSign;
}<file_sep>function lex(mathText) {
const isDigit = char => char >= '0' && char <= '9';
const lastCharacter = text => text.length === 0 ? null : text[text.length - 1];
const addSeparator = (char, text) => !isDigit(char) || !isDigit(lastCharacter(text)) ? ',' : '';
const handleOneChar = (total, current) => total + addSeparator(current, total) + current;
const chars = mathText.split('');
const charsWithSeparators = chars.reduce(handleOneChar, '');
return charsWithSeparators.split(',');
}<file_sep>function createMenuHtml(options, onGoingMathOperation) {
const name = onGoingMathOperation && onGoingMathOperation.name;
const operations = Object.keys(options).filter(isOperationAvailable);
return operations.map(f => `
<button class="${name === f ? 'ongoing' : ''}" onclick="${selectMath.name}('${f}')">${getIcon(f)}</button>
`).join('');
}
function isOperationAvailable(operationName) {
const operation = model.mathOperations[operationName];
return model.level >= operation.levels.first
&& (!operation.levels.last || model.level <= operation.levels.last);
}
function getIcon(f) {
if (f === 'error') return '⚠';
// const icon = model.mathOperations[f].icon;
// if (icon[0] === '^') return `<span style="font-size: 160%">${icon.substring(1)}</span>`;
// return icon
// .replace(/\n/g, '<br/>')
// .replace(/ /g, ' ');
const svg = model.mathOperations[f].svg;
return `
<svg class="svgIcon" viewBox="0 0 ${svg.viewBox.width} ${svg.viewBox.height}">
<path d="${svg.path}" />
</svg>
`;
}
function createMathText(mathText, highlight) {
const tree = parseMathText(mathText);
return createHtml(tree, highlight);
}
function createHtml(node, highlight, showOperator) {
const isLeaf = node.value != undefined;
const isActive = getIsActive(highlight, node);
const cssClass = isActive ? 'highlight' : '';
const onclick = isActive ? `onclick="${doMath.name}('${indexesFromNode(node)}')"` : '';
const operatorHtml = showOperator ? `<div>${node.parent.operator.trim()}</div>` : '';
const includeOperatorInSameHtml =
node.operator !== '='
&& model.onGoingMathOperation && model.onGoingMathOperation.step == 'selectOneTerm';
const contentHtml = isLeaf ? `<div>${node.value.trim()}</div>` : createNodeHtml(node, highlight);
return includeOperatorInSameHtml
? `<div class="flex ${cssClass}" ${onclick}>${operatorHtml}${contentHtml}</div>`
: `${operatorHtml}<div class="flex ${cssClass}" ${onclick}>${contentHtml}</div>`
}
function nodeToString(node) {
const isLeaf = node.value != undefined;
if (isLeaf) {
const sign = parentOperator(node) === '-' && node == node.parent.content[1] ? '-' : '';
return sign + node.value;
}
if (node.content.length === 1) return node.operator + nodeToString(node.content[0]);
return nodeToString(node.content[0]) + node.operator + nodeToString(node.content[1]);
}
function getIsActive(highlight, node) {
return highlight === 'selectOneTerm' && isTopLevelTerm(node)
|| highlight === 'selectNumber' && isNumber(node)
|| highlight === 'selectTopLevelFactor' && isTopLevelFactor(node)
|| highlight === 'selectFactorInNumerator' && isFactorInDivision(node, true)
|| highlight === 'selectFactorInDenominator' && isFactorInDivision(node, false);
}
function isTopLevelFactor(node) {
if (isTopLevelTerm(node) && node.value !== undefined) return true;
if (parentOperator(node) !== '*') return false;
const product = getTopLevelProductOfFactor(node);
return isTopLevelTerm(product);
}
function getTopLevelProductOfFactor(node) {
return parentOperator(node) === '*'
? getTopLevelProductOfFactor(node.parent)
: node;
}
function isFactorInDivision(node, lookInNumerator) {
const isPrimitiveOrNotProduct = !node.operator || node.operator !== '*';
if (isNumeratorOrDenominator(node, lookInNumerator)) return isPrimitiveOrNotProduct;
const product = getTopLevelProductOfFactor(node);
return (node.value !== undefined || node.operator !== '*')
&& isNumeratorOrDenominator(product, lookInNumerator);
}
function isNumeratorOrDenominator(node, numerator) {
const index = numerator ? 0 : 1;
return parentOperator(node) === '/' && indexWithParent(node) === index;
}
function createNodeHtml(node, highlight) {
const op = node.operator.trim();
if (op === '/') return `
<div class="flex vertical">
${createHtml(node.content[0], highlight)}
<div class="fraction"> </div>
${createHtml(node.content[1], highlight)}
</div>
`;
if (node.content.length == 2) {
const showOperator = node.operator !== '*' || showMultiplicationOperator(node);
return `
<div class="flex">
${createHtml(node.content[0], highlight)}
${createHtml(node.content[1], highlight, showOperator)}
</div>
`;
}
if (op === '-' && node.content.length === 1) {
const child = node.content[0];
if (isNumber(child) || isLetter(child)) {
return `
<div class="flex">
-${createHtml(node.content[0], highlight)}
</div>
`;
}
return `
<div class="flex">
(-${createHtml(node.content[0], highlight)})
</div>
`;
}
console.error('cannot create HTML', node);
}
function showMultiplicationOperator(node) {
const isNumberOrUnaryMinusNumber = isNumber(node.content[0])
|| isUnaryMinus(node.content[0]) && isNumber(node.content[0].content[0]);
if (isNumberOrUnaryMinusNumber && (isLetter(node.content[1]) || isMultiplication(node.content[1])))
return false;
return true;
}
<file_sep>newExercise();
function updateView() {
const videoHtml = `<iframe width="560" height="315" src="https://www.youtube.com/embed/${model.youTubeVideoIds[model.level]}" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>`;
document.getElementById('app').innerHTML = `
<div class="mainPart history historyPre">
${createHistoryHtml(true)}
</div>
<div id="mathContent" class="math mainPart">
${createMathText(model.mathText, getStep())}
</div>
<div class="mainPart history">
${createHistoryHtml(false)}
</div>
<div class="mainPart panel">
<div id="txt">${getText()}</div>
<div id="menuBar">
${createMenuHtml(model.mathOperations, model.onGoingMathOperation)}
</div>
</div>
<div class="mainPart">
<div>
<button class="video"
onclick="${toggleVideos.name}()">
${model.showVideos ? 'Skjul' : 'Vis' }
video
</button>
</div>
${model.showVideos ? videoHtml : ''}
</div>
<div class="mainPart panel footer">
<div class="levels" >
<button class="exercise" onclick="${newExercise.name}()">Ny nivå ${model.level}-oppgave</button>
<div style="width: 40px"></div>
<input type="text" oninput="${Object.keys({x: model})[0]}.${Object.keys(model)[2]}=this.value"/>
<button class="exercise" onclick="${newCustomExercise.name}()">Ny egen oppgave</button>
</div>
<div class="levels">
Nivåer:
${createLevelsMenuHtml()}
<button onclick="window.open('https://m.me/playterje')" class="level kontakt">Kontakt PlayTerje</div>
</div>
</div>
`;
const el = document.getElementsByClassName('historyPre')[0];
el.scrollTop = el.scrollHeight;
}
function toggleVideos(){
model.showVideos = !model.showVideos;
updateView();
}
function createHistoryHtml(isPreHistory) {
const history = model.history;
const index = history.index;
const allItems = history.items;
const limits = isPreHistory ? [0, index] : [index + 1, allItems.length];
const items = model.history.items.slice(...limits);
return items.map(mathText => `
<div id="mathContent" class="math">
${createMathText(mathText, null)}
</div>
`).join('');
}
function newExercise() {
const fn = levelExerciseFunctions[model.level];
newExerciseImpl(fn());
}
function newCustomExercise() {
model.errorMessage = '';
let result = null;
try {
result = parseMathText(model.ownExercise);
} catch (error) {
model.errorMessage = `Kan ikke tolke uttrykket <tt>${model.ownExercise}</tt>`;
updateView();
return;
}
newExerciseImpl(model.ownExercise);
}
function newExerciseImpl(exercise) {
model.errorMessage = null;
model.mathText = exercise;
model.history.items.length = 0;
model.history.items.push(exercise);
model.history.index = 0;
updateView();
}
function createLevelsMenuHtml() {
return range(1, 7).map(level => `
<button class="level ${level === model.level ? 'selectedLevel' : ''}"
onclick="${selectLevel.name}(${level})">
${level}
</button>
`).join('');
}
function selectLevel(level) {
model.level = level;
updateView();
}
function getStep() {
const operation = model.onGoingMathOperation;
return operation ? operation.step : null;
}
function getText() {
const message = createMessage();
// const error = `<div class="error">${model.errorMessage || ''}</div>`;
if (model.errorMessage) {
const description = createDescriptionHtml('error', model.errorMessage, 'error');
return createText(description, message);
}
const operation = model.onGoingMathOperation;
if (!operation) return createText('', message);
const step = operation ? operation.step : null;
if (!step) return createText();
const operationName = operation.name;
const mathOperation = model.mathOperations[operationName];
const description = createDescriptionHtml(operationName, mathOperation.description);
const length = operation.arguments ? operation.arguments.length : 0;
const stepsHtml = operation.steps.map((step, i) => `
<li class="${i == length ? '' : 'passive'}">${step}</li>
`).join('');
return createText(description, `<ol>${stepsHtml}</ol>`);
}
function createDescriptionHtml(operationName, text, extraCssClass) {
return `
<div class="selectedFunction">
<button class="display ${extraCssClass || ''}" disabled>
${getIcon(operationName)}
</button>
<div class="${extraCssClass || 'function'}">
${text}
</div>
</div>
`;
}
function createMessage() {
return model.mathText && isEquationSolved(model.mathText) ? 'Ligningen er løst.' :
model.level === 0 ? '' :
'Velg operasjon:';
}
function isEquationSolved(mathText) {
const node = parseMathText(mathText);
const letterOnOneSide = isLetter(node.content[0]) || isLetter(node.content[1]);
const numberOnOneSide = isNumber(node.content[0]) || isNumber(node.content[1])
|| isUnaryNumber(node.content[0]) || isUnaryNumber(node.content[1]);
if (letterOnOneSide && numberOnOneSide) return true;
if (!letterOnOneSide) return false;
const fraction = node.content[0].operator === '/' ? node.content[0]
: node.content[1].operator === '/' ? node.content[1] : null;
if (fraction === null) return false;
if (!isNumber(fraction.content[0]) || !isNumber(fraction.content[1])) return false;
const number1 = parseInt(fraction.content[0].value);
const number2 = parseInt(fraction.content[1].value);
return !primeFactorizeImpl(number1).includes('*')
&& !primeFactorizeImpl(number2).includes('*');
}
function isUnaryNumber(node) {
return isUnaryMinus(node) && isNumber(node.content[0]);
}
function createText(fn, step) {
return `
<div>${fn || ' '}</div>
<div class="step"><i>${step || ' '}</i></div>
`;
}
<file_sep>function indexesFromNode(node) {
    // Encode the path from the root down to `node` as a string of
    // '0'/'1' child positions; '' for the root. Inverse of
    // nodeFromIndexes, which walks content[] by these indexes.
    if (!node.parent) return '';
    // '0' when node is its parent's first child, '1' otherwise.
    const lastChoice = node.parent.content[0] === node ? '0' : '1';
    return indexesFromNode(node.parent) + lastChoice;
}
// Walk `tree` following a string of child indexes ('0'/'1'), the
// inverse of indexesFromNode. An empty string returns the tree itself.
function nodeFromIndexes(indexes, tree) {
    return indexes.split('').reduce((node, index) => node.content[index], tree);
}
// True when the node is a leaf holding a single lower-case letter
// (i.e. a variable such as 'x').
function isLetter(node) {
    const v = node.value;
    return v && v.length === 1 && v >= 'a' && v <= 'z';
}
// True when the node's value is a non-empty string of decimal digits.
// Non-string values (including actual numbers) are rejected.
function isNumber(node) {
    const v = node.value;
    if (typeof v !== 'string' || v.length === 0) return false;
    return v.split('').every(c => c >= '0' && c <= '9');
}
// True for a unary minus: a '-' node wrapping exactly one child.
function isUnaryMinus(node) {
    if (node.operator !== '-') return false;
    return node.content.length === 1;
}
// True when the node is a product ('*').
function isMultiplication(node) {
    return node.operator === '*';
}
// True when the node is a quotient ('/').
function isDivision(node) {
    return node.operator === '/';
}
function isTopLevelTerm(node) {
if ('=+-'.includes(node.operator) && node.content.length === 2) return false;
return firstParentOperatorOtherThan('+-', node) === '=' && !isUnaryMinus(node.parent);
}
function firstParentOperatorOtherThan(operators, node) {
return operators.includes(node.parent.operator)
? firstParentOperatorOtherThan(operators, node.parent)
: node.parent.operator;
}
function createConstantNode(constant) {
const node = { value: '' + Math.abs(constant) };
return constant < 0 ? makeNode('-', [node]) : node;
}
// Operator of the node's parent, or null at the root.
function parentOperator(node) {
    if (!node.parent) return null;
    return node.parent.operator;
}
// Operator two levels up, or null when there is no grandparent.
function parentParentOperator(node) {
    const grandParent = node.parent ? node.parent.parent : null;
    return grandParent ? grandParent.operator : null;
}
function treeAsText(node) {
const txt = node.value !== undefined
? node.value
: node.operator + '(' + node.content.map(c => treeAsText(c)).join() + ')';
return '[' + indexesFromNode(node) + ']' + txt;
}
// Half-open integer range [min, max) as an array; empty when max <= min.
function range(min, max) {
    const result = [];
    for (let value = min; value < max; value++) {
        result.push(value);
    }
    return result;
}
function isFunction(functionToCheck) {
return functionToCheck && {}.toString.call(functionToCheck) === '[object Function]';
}const levelExerciseFunctions = [
// 0 - ferdig løste lignigner
() => createEquation([randomTerm(true, false)]),
// 1 - + slå sammen ledd
() => createEquation([randomTerm(true, false), randomTerm(false, false)]),
// 2 - + trekke fra på begge sider
() => createEquation([randomTerm(false, false)], [randomTerm(true, false)]),
// 3 - som før, men x-ledd også
createEquationWithXTermsWithDiffOfOne,
// 4 - flytte ledd istedenfor trekke fra på begge sider
createEquationWithXTermsWithDiffOfOne,
// 5 - + dele + forkorte
createEquationWithNoNeedForPrimeFactorization,
// 6 - primtallsfaktorisere
createEquationWithNeedForPrimeFactorization,
];
function createEquationWithNeedForPrimeFactorization() {
// ax=b
const commonFactorCount = 1;//randomNumberFromRange(1, 4);
const commonFactors = range(0, commonFactorCount).map(() => randomPrime());
const product = commonFactors.reduce((value, total) => total * value, 1);
const a = product * randomPrime();
const b = product * randomFromArray([2, 3, 5, 7].filter(n => n !== a));
const [a1, a2] = splitNumberInTwoRandomParts(a);
const [b1, b2] = splitNumberInTwoRandomParts(b);
return equationAxBequalsCxD(a1, -b1, -a2, b2);
}
function createEquationWithNoNeedForPrimeFactorization() {
// ax=b, hvor a og b er forskjellige primtall
const a = randomPrime();
const b = randomFromArray([2, 3, 5, 7].filter(n => n !== a));
const [a1, a2] = splitNumberInTwoRandomParts(a);
const [b1, b2] = splitNumberInTwoRandomParts(b);
return equationAxBequalsCxD(a1, -b1, -a2, b2);
}
function equationAxBequalsCxD(a, b, c, d) {
return randomOrderSum(a, b) + '=' + randomOrderSum(c, d);
}
function splitNumberInTwoRandomParts(n) {
const term = numberWithRandomSign(randomNumber());
return [term, n - term];
}
function createEquationWithXTermsWithDiffOfOne() {
const x1abs = randomNumberFromRange(2, 8);
const x2abs = x1abs + 1;
const x1 = randomBool() ? x1abs : -1 * x1abs;
const x2 = x1 > 0 ? x2abs : -1 * x2abs;
const c1abs = randomNumber();
const c2abs = randomNumber();
const c1 = randomBool() ? c1abs : -1 * c1abs;
const c2 = c1 > 0 ? c2abs : -1 * c2abs;
return equationAxBequalsCxD(x1, c1, x2, c2);
}
function numberWithRandomSign(number) {
return randomBool() ? number : -1 * number;
}
function randomOrderSum(x, c) {
const xTxt = x > 0 ? '+' + x : '' + x;
const cTxt = c > 0 ? '+' + c : '' + c;
const sum = randomBool()
? xTxt + '*x' + cTxt
: cTxt + xTxt + '*x';
return sum.startsWith('+') ? sum.substr(1) : sum;
}
function randomBool() {
return Math.random() < 0.5;
}
function createEquation(terms1, terms2) {
if (!terms2) return randomFlipOrNot('x=' + terms1.join(''));
return randomFlipOrNot('x' + terms1.join('') + '=' + terms2.join(''));
}
function randomTerm(isFirst, includeX) {
let txt = '';
txt += randomSign(isFirst ? '' : null);
txt += randomNumber();
return includeX ? txt + '*x' : txt;
}
function randomNumberFromRange(min, max) {
return min + Math.floor(Math.random() * (max - min));
}
function randomNumber() {
return randomNumberFromRange(1, 9)
}
function randomPrime() {
return [2, 3, 5, 7][randomNumberFromRange(0, 3)]
}
function randomFromArray(array) {
return array[randomNumberFromRange(0, array.length)];
}
function randomFlipOrNot(equation) {
if (Math.random() < 0.5) return equation;
const parts = equation.split('=');
return parts[1] + '=' + parts[0];
}
function randomSign(plusSign) {
return Math.random() < 0.5 ? '-' :
plusSign === null ? '+' :
plusSign;
}/*
EXPR --> TERM {( "+" | "-" ) TERM}
TERM --> FACT {( "*" | "/" ) FACT}
FACTOR --> P ["^" FACTOR]
P --> v | "(" EXPRESSION ")" | "-" TERM
*/
//parse(['x']);
// parse(['x','+','y']);
// console.log(parse(['x', '+', 'y', '*', '2']));
//console.log(parse(['x', '+', '1', '/', '(', 'y', '+', '2', ')']));
//setTimeout(() => show('2+x-1'), 100);
function parseMathText(mathText) {
const equalSignIndex = mathText.indexOf('=');
if (equalSignIndex == -1) {
const tokens = lex(mathText);
return parse(tokens);
}
const leftSide = mathText.substr(0, equalSignIndex);
const rightSide = mathText.substr(equalSignIndex + 1);
const leftSideTree = parseMathText(leftSide);
const rightSideTree = parseMathText(rightSide);
let tree = makeNode('=', [leftSideTree, rightSideTree]);
tree = addParentAndId(tree, null);
return tree;
}
function toString(node) {
if (node.value != undefined) {
return node.value;
}
if (isUnaryMinus(node)) {
if (isFirstTerm(node)) return '-' + toString(node.content[0]);
else return '(-' + toString(node.content[0]) + ')';
}
if (node.operator === '/') {
return '(' + toString(node.content[0]) + ')' + node.operator + '(' + toString(node.content[1]) + ')';
}
if (node.operator === '-' && '+-'.includes(node.content[1].operator)) {
return toString(node.content[0]) + node.operator + '(' + toString(node.content[1]) + ')';
}
return toString(node.content[0]) + node.operator + toString(node.content[1]);
}
// True when the node is the leftmost term of its whole (sub)expression, so
// a unary minus on it can be printed without parentheses.
function isFirstTerm(node) {
    const isFirstWithParent = node.parent && node === node.parent.content[0];
    // Inside a sum/difference the node must be the first operand all the way up.
    if (node.parent && '+-'.includes(parentOperator(node))) return isFirstWithParent && isFirstTerm(node.parent);
    return true;
}
function addParentAndId(node, parent) {
if (!node) return;
node.parent = parent;
if (!node.content) return;
for (var child of node.content) {
addParentAndId(child, node);
}
return node;
}
function lex(mathText) {
const isDigit = char => char >= '0' && char <= '9';
const lastCharacter = text => text.length === 0 ? null : text[text.length - 1];
const addSeparator = (char, text) => text.length > 0 && (!isDigit(char) || !isDigit(lastCharacter(text)));
const separator = (char, text) => addSeparator(char, text) ? ',' : '';
const handleOneChar = (total, current) => total + separator(current, total) + current;
const chars = mathText.split('');
const charsWithSeparators = chars.reduce(handleOneChar, '');
return charsWithSeparators.split(',');
}
function parse(tokens) {
    // Parses a full token list; null signals trailing unconsumed tokens.
    const state = parseExpression(tokens);
    if (state.tokens.length > 0) return null;
    return state.tree;
}
function parseExpression(tokens) {
    // EXPR --> TERM {("+" | "-") TERM}
    const additiveOperators = '+-';
    return parseMultipart(tokens, additiveOperators, parseTerm);
}
function parseTerm(tokens) {
    // TERM --> FACT {("*" | "/") FACT}
    const multiplicativeOperators = '*/';
    return parseMultipart(tokens, multiplicativeOperators, parseFactor);
}
// Parses a left-associative chain: PART {op PART}, where op is one of
// `operators` ('+-' or '*/'). Consumes tokens in place and returns the
// final { tokens, tree } parser state.
function parseMultipart(tokens, operators, parseFn) {
    let partState1 = parseFn(tokens);
    while (operators.includes(partState1.tokens[0])) {
        const operator = partState1.tokens.shift();
        const partState2 = parseFn(partState1.tokens);
        // Fold left: the tree built so far becomes the left operand.
        partState1.tree = makeNode(operator, [partState1.tree, partState2.tree])
        partState1.tokens = partState2.tokens;
    }
    return partState1;
}
// FACTOR --> P ["^" FACTOR]; the recursion makes '^' right-associative.
function parseFactor(tokens) {
    const state = parseParenthesisValueOrUnary(tokens);
    let myTokens = state.tokens;
    if (myTokens[0] !== '^') return state;
    myTokens.shift();
    const factorState = parseFactor(myTokens);
    return makeState(factorState.tokens, makeNode('^', [state.tree, factorState.tree]));
}
// P --> value | "(" EXPRESSION ")" | "-" FACTOR
// Consumes tokens in place; malformed input is logged, not thrown.
function parseParenthesisValueOrUnary(tokens) {
    if (isNumberOrLetter(tokens[0])) {
        const value = tokens.shift();
        return makeState(tokens, makeLeaf(value));
    } else if (tokens[0] === '(') {
        tokens.shift();
        const state = parseExpression(tokens);
        if (tokens.shift() !== ')') console.error('expected )');
        return state;
    } else if (tokens[0] === '-') {
        tokens.shift();
        const state = parseFactor(tokens);
        // Unary minus is represented as a one-child '-' node.
        return makeState(tokens, makeNode('-', [state.tree]));
    } else {
        console.error('Error in parseParenthesisValueOrUnary. Tokens: ', tokens)
    }
}
function isNumberOrLetter(text) {
return text[0] >= '0' && text[0] <= '9' || text[0] >= 'a' && text[0] <= 'z';
}
function makeNode(operator, content) {
return { operator, content };
}
function makeLeaf(value) {
return { value };
}
// Parser state: the tokens still to consume plus the tree built so far.
function makeState(tokens, tree) {
    return { tokens, tree };
}function createMenuHtml(options, onGoingMathOperation) {
    // Renders one toolbar button per operation available at the current
    // level; the ongoing operation's button gets the 'ongoing' CSS class.
    const name = onGoingMathOperation && onGoingMathOperation.name;
    const operations = Object.keys(options).filter(isOperationAvailable);
    return operations.map(f => `
    <button class="${name === f ? 'ongoing' : ''}" onclick="${selectMath.name}('${f}')">${getIcon(f)}</button>
`).join('');
}
function isOperationAvailable(operationName) {
    // Visible from levels.first upwards; levels.last (when set) caps it.
    const levels = model.mathOperations[operationName].levels;
    if (model.level < levels.first) return false;
    return !levels.last || model.level <= levels.last;
}
// Returns the SVG icon markup for an operation button; the pseudo-operation
// 'error' gets a warning glyph instead.
function getIcon(f) {
    if (f === 'error') return '⚠';
    // Older text-based icons, kept for reference:
    // const icon = model.mathOperations[f].icon;
    // if (icon[0] === '^') return `<span style="font-size: 160%">${icon.substring(1)}</span>`;
    // return icon
    // .replace(/\n/g, '<br/>')
    // .replace(/ /g, ' ');
    const svg = model.mathOperations[f].svg;
    return `
    <svg class="svgIcon" viewBox="0 0 ${svg.viewBox.width} ${svg.viewBox.height}">
        <path d="${svg.path}" />
    </svg>
    `;
}
function createMathText(mathText, highlight) {
    // Parses the math text and renders the resulting tree as HTML.
    const tree = parseMathText(mathText);
    const html = createHtml(tree, highlight);
    return html;
}
// Renders one tree node as nested flex <div>s. `highlight` names the current
// selection step (or null); matching nodes become clickable. `showOperator`
// prefixes the node with its parent's operator.
function createHtml(node, highlight, showOperator) {
    const isLeaf = node.value != undefined;
    const isActive = getIsActive(highlight, node);
    const cssClass = isActive ? 'highlight' : '';
    // Clicking an active node passes its tree path (indexes) to doMath.
    const onclick = isActive ? `onclick="${doMath.name}('${indexesFromNode(node)}')"` : '';
    const operatorHtml = showOperator ? `<div>${node.parent.operator.trim()}</div>` : '';
    // During term selection the sign belongs to the term, so the operator
    // must share the clickable element with the term's content.
    const includeOperatorInSameHtml =
        node.operator !== '='
        && model.onGoingMathOperation && model.onGoingMathOperation.step == 'selectOneTerm';
    const contentHtml = isLeaf ? `<div>${node.value.trim()}</div>` : createNodeHtml(node, highlight);
    return includeOperatorInSameHtml
        ? `<div class="flex ${cssClass}" ${onclick}>${operatorHtml}${contentHtml}</div>`
        : `${operatorHtml}<div class="flex ${cssClass}" ${onclick}>${contentHtml}</div>`
}
function nodeToString(node) {
    // Plain-text rendering of a subtree; a leaf that is the right operand
    // of a '-' carries the minus sign with it.
    if (node.value != undefined) {
        const isNegated = parentOperator(node) === '-' && node == node.parent.content[1];
        return (isNegated ? '-' : '') + node.value;
    }
    const [first, second] = node.content;
    if (second === undefined) return node.operator + nodeToString(first);
    return nodeToString(first) + node.operator + nodeToString(second);
}
function getIsActive(highlight, node) {
    // Whether the node is selectable for the current step.
    switch (highlight) {
        case 'selectOneTerm': return isTopLevelTerm(node);
        case 'selectNumber': return isNumber(node);
        case 'selectTopLevelFactor': return isTopLevelFactor(node);
        case 'selectFactorInNumerator': return isFactorInDivision(node, true);
        case 'selectFactorInDenominator': return isFactorInDivision(node, false);
        default: return false;
    }
}
// True when the node is selectable as a factor: either it is itself a
// top-level term (a leaf), or it is a direct factor of a top-level product.
function isTopLevelFactor(node) {
    if (isTopLevelTerm(node) && node.value !== undefined) return true;
    if (parentOperator(node) !== '*') return false;
    const product = getTopLevelProductOfFactor(node);
    return isTopLevelTerm(product);
}
function getTopLevelProductOfFactor(node) {
    // Walks up the tree while still inside a product, returning the
    // outermost '*' node (or the node itself if not inside one).
    let current = node;
    while (parentOperator(current) === '*') {
        current = current.parent;
    }
    return current;
}
// True when the node is selectable as a factor of a fraction's numerator
// (lookInNumerator) or denominator: either the whole numerator/denominator
// when it is not itself a product, or one non-product factor of it.
function isFactorInDivision(node, lookInNumerator) {
    const isPrimitiveOrNotProduct = !node.operator || node.operator !== '*';
    if (isNumeratorOrDenominator(node, lookInNumerator)) return isPrimitiveOrNotProduct;
    const product = getTopLevelProductOfFactor(node);
    return (node.value !== undefined || node.operator !== '*')
        && isNumeratorOrDenominator(product, lookInNumerator);
}
function isNumeratorOrDenominator(node, numerator) {
    // Child 0 of a '/' is the numerator, child 1 the denominator.
    if (parentOperator(node) !== '/') return false;
    const expectedIndex = numerator ? 0 : 1;
    return indexWithParent(node) === expectedIndex;
}
// Renders an operator node: fractions stack vertically with a bar, binary
// nodes lay out horizontally, and a unary minus is parenthesized unless its
// operand is a simple number or letter.
function createNodeHtml(node, highlight) {
    const op = node.operator.trim();
    if (op === '/') return `
        <div class="flex vertical">
            ${createHtml(node.content[0], highlight)}
            <div class="fraction"> </div>
            ${createHtml(node.content[1], highlight)}
        </div>
    `;
    if (node.content.length == 2) {
        // '*' is usually implicit (2x); showMultiplicationOperator decides.
        const showOperator = node.operator !== '*' || showMultiplicationOperator(node);
        return `
        <div class="flex">
            ${createHtml(node.content[0], highlight)}
            ${createHtml(node.content[1], highlight, showOperator)}
        </div>
        `;
    }
    if (op === '-' && node.content.length === 1) {
        const child = node.content[0];
        if (isNumber(child) || isLetter(child)) {
            return `
            <div class="flex">
                -${createHtml(node.content[0], highlight)}
            </div>
            `;
        }
        return `
        <div class="flex">
            (-${createHtml(node.content[0], highlight)})
        </div>
        `;
    }
    console.error('cannot create HTML', node);
}
function showMultiplicationOperator(node) {
    // Hide '*' for the conventional implicit form: a (possibly negated)
    // number followed by a letter or another product, e.g. 2x or -3xy.
    const [left, right] = node.content;
    const leftIsConstant = isNumber(left)
        || (isUnaryMinus(left) && isNumber(left.content[0]));
    const rightIsLetterOrProduct = isLetter(right) || isMultiplication(right);
    return !(leftIsConstant && rightIsLetterOrProduct);
}
function selectMath(functionName) {
    // Begin a new operation (clicked in the toolbar); clears old errors.
    model.errorMessage = '';
    model.onGoingMathOperation = { name: functionName, arguments: [] };
    selectMathImpl();
}
// (Re)computes the step list for the ongoing operation and records the step
// matching the number of arguments collected so far. Zero-step operations
// (undo/redo) execute immediately.
function selectMathImpl() {
    const operation = model.onGoingMathOperation;
    const functionName = operation.name;
    const selectedFunction = model.mathOperations[functionName];
    if (!selectedFunction.steps) {
        console.error('unknown function ' + functionName);
        return;
    }
    // Step ids are mapped to their user-facing descriptions.
    operation.steps = selectedFunction.steps.map(step => model.steps[step]);
    const stepIndex = model.onGoingMathOperation.arguments.length;
    const step = selectedFunction.steps[stepIndex];
    model.onGoingMathOperation.step = step;
    if (operation.steps.length === 0) doMath();
    updateView();
}
// Executes the ongoing operation with the clicked node path `arg`. Two-step
// operations (mergeTerms, reduceFraction) only record the first click;
// the second click runs them. Successful edits are appended to history.
function doMath(arg) {
    const operation = model.onGoingMathOperation;
    const args = operation.arguments;
    // First selection of a two-selection operation: just remember it.
    if (args.length === 0 && [mergeTerms.name, reduceFraction.name].includes(operation.name)) {
        nextStep(arg);
        return;
    }
    args.push(arg);
    // Operation names double as names of global functions.
    const func = eval(operation.name);
    if (!isFunction(func)) {
        console.error('unknown operation: ' + model.onGoingMathOperation.name);
        return;
    }
    func(...args);
    // undo/redo manage history themselves.
    if ([undo.name, redo.name].includes(operation.name)) {
        resetAndUpdateView();
        return;
    }
    const history = model.history.items;
    const index = model.history.index;
    // Editing after an undo discards the redo tail.
    if (index < history.length) {
        history.splice(index + 1);
    }
    history.push(model.mathText);
    model.history.index++;
    resetAndUpdateView();
}
function undo() {
    // Step one entry back in history (no-op at the beginning).
    const { items, index } = model.history;
    if (items.length === 0 || index === 0) return;
    const previousIndex = index - 1;
    model.history.index = previousIndex;
    model.mathText = items[previousIndex];
}
function redo() {
    // Step one entry forward in history (no-op at the end).
    const { items, index } = model.history;
    if (index + 1 >= items.length) return;
    const nextIndex = index + 1;
    model.history.index = nextIndex;
    model.mathText = items[nextIndex];
}
function nextStep(arg) {
    // Record one selection and advance the ongoing operation to its next step.
    model.onGoingMathOperation.arguments.push(arg);
    selectMathImpl();
    updateView();
}
function primeFactorize(indexes) {
    // Replaces the selected number with its prime factorization (e.g. 12 -> 2*2*3).
    const tree = parseMathText(model.mathText);
    const selected = nodeFromIndexes(indexes, tree);
    if (!isNumber(selected)) return;
    const factorization = primeFactorizeImpl(parseInt(selected.value));
    const productTree = parseMathText(factorization);
    replaceNode(selected, productTree);
    model.mathText = toString(tree);
}
function findLowestFactor(number, factor) {
return number % factor == 0 ? factor : findLowestFactor(number, factor + 1);
}
function primeFactorizeImpl(number) {
    // "12" -> "2*2*3"; a prime maps to itself.
    const factor = findLowestFactor(number, 2);
    return factor === number
        ? '' + number
        : factor + '*' + primeFactorizeImpl(number / factor);
}
function subtractTermOnBothSides(indexes) {
    // Pedagogical variant of moving a term: show the subtraction explicitly.
    const subtractOnBothSides = true;
    moveTermToOtherSide(indexes, subtractOnBothSides);
}
// Moves the selected term across the '='. With subtractOnBothSides the term
// is explicitly added/subtracted on both sides; otherwise it simply becomes
// 0 on its own side (cleaned up by doSimplifications).
function moveTermToOtherSide(indexes, subtractOnBothSides) {
    const tree = parseMathText(model.mathText);
    const node = nodeFromIndexes(indexes, tree);
    const nodeSide = getSideOfEquaction(node);
    const otherSide = 1 - nodeSide;
    const existingSign = getCombinedSignOfTopLevelTerm(node);
    // Strip unary minuses inside the term's factors; each one flips the sign.
    let count = 0;
    while (removeUnaryMinusFactors(node, tree)) count++;
    // Crossing '=' flips the sign once more, hence the inverted branches.
    const newSign = count % 2 === 1
        ? (existingSign === 1 ? '+' : '-')
        : (existingSign === 1 ? '-' : '+');
    replaceNode(tree.content[otherSide], sideWithNewNode(tree, otherSide, node, newSign));
    if (subtractOnBothSides) {
        replaceNode(tree.content[nodeSide], sideWithNewNode(tree, nodeSide, node, newSign));
    } else {
        replaceNode(node, { value: '0' });
    }
    addParentAndId(tree);
    doSimplifications(tree);
    model.mathText = toString(tree);
}
// Removes the first unary minus found among the factors of a product
// (depth-first) and returns true, so callers can loop and count sign flips.
// NOTE(review): some callers pass a second argument, which is ignored.
function removeUnaryMinusFactors(node) {
    if (node.value !== undefined || node.operator !== '*') return false;
    if (isUnaryMinus(node.content[0])) {
        replaceNode(node.content[0], node.content[0].content[0]);
        return true;
    } else {
        if (removeUnaryMinusFactors(node.content[0])) return true;
    }
    if (isUnaryMinus(node.content[1])) {
        replaceNode(node.content[1], node.content[1].content[0]);
        return true;
    } else {
        if (removeUnaryMinusFactors(node.content[1])) return true;
    }
    return false;
}
function sideWithNewNode(tree, side, node, sign) {
    // Clone of one equation side combined with a clone of the moved term.
    const clonedSide = cloneNode(tree.content[side]);
    const clonedTerm = cloneNode(node);
    return makeNode(sign, [clonedSide, clonedTerm]);
}
function getSideOfEquaction(node) {
    // 0 = left of '=', 1 = right. (Misspelled name kept for existing callers.)
    if (parentOperator(node) === '=') return indexWithParent(node);
    return getSideOfEquaction(node.parent);
}
// Merges two selected top-level terms on the same side of the equation:
// constant+constant, or like products/letters. Validation failures show a
// Norwegian error message to the user.
function mergeTerms(indexes1, indexes2) {
    const tree = parseMathText(model.mathText);
    const selectedNode1 = nodeFromIndexes(indexes1, tree);
    const selectedNode2 = nodeFromIndexes(indexes2, tree);
    if (nodesAreOnSeparateSides(selectedNode1, selectedNode2, tree)) {
        return finishWithError('Kan bare slå sammen ledd som er på samme side av ligningen.');
    }
    if (!isTopLevelTerm(selectedNode1) || !isTopLevelTerm(selectedNode2)) {
        return finishWithError('Kan bare slå sammen ledd som er på toppnivå på høyre eller venstre side av ligningen.');
    }
    // Work on the node inside any unary minus; the sign is handled separately.
    let node1 = isUnaryMinus(selectedNode1) ? selectedNode1.content[0] : selectedNode1;
    let node2 = isUnaryMinus(selectedNode2) ? selectedNode2.content[0] : selectedNode2;
    let typeTerm1 = getType(node1);
    let typeTerm2 = getType(node2);
    // Normalize order so each type pair below only needs one direction.
    if (typeTerm1 > typeTerm2) [node1, node2, typeTerm1, typeTerm2] = [node2, node1, typeTerm2, typeTerm1];
    if (typeTerm1 === 'constant') {
        if (typeTerm2 !== 'constant') return finishWithError('Konstantledd kan bare slås sammen med andre konstantledd.');
        mergeConstantAndConstant(node1, node2);
    } else if (typeTerm1 === 'letter' && ['product', 'letter'].includes(typeTerm2)) {
        // A bare letter x is rewritten as 1*x so it merges like a product.
        const newNode1 = replaceLetterWithProductOfOne(node1);
        const newNode2 = replaceLetterWithProductOfOne(node2);
        addParentAndId(tree);
        mergeProductAndProduct(newNode1, newNode2);
    } else if (typeTerm1 === 'product') {
        if (typeTerm2 !== 'product') return finishWithError('Kan ikke slå sammen disse leddene.');
        mergeProductAndProduct(node1, node2);
    } else if (typeTerm1 === 'division') {
        // Merging fractions is not implemented yet.
    }
    addParentAndId(tree);
    doSimplifications(tree);
    model.mathText = toString(tree);
}
function replaceLetterWithProductOfOne(node) {
    // x -> 1*x so a bare letter can be merged like any other product.
    if (!isLetter(node)) return node;
    const product = makeNode('*', [{ value: '1' }, node]);
    replaceNode(node, product);
    return product;
}
// Merges two like product terms (e.g. 2*x and 3*x) by summing their signed
// leading constants; one term is removed, the other keeps the sum.
function mergeProductAndProduct(node1, node2) {
    if (!productsExceptFromFirstConstantsAreEqual(node1, node2)) {
        return finishWithError('Produktledd må være like, bortsett fra ev. første konstantfaktor, for å kunne slås sammen.');
    }
    const factor1 = getFirstFactorInProduct(node1);
    const factor2 = getFirstFactorInProduct(node2);
    // A term without a numeric factor counts as 1 (x means 1*x).
    const value1 = numberOrUnaryMinusNumberValue(factor1) || 1;
    const value2 = numberOrUnaryMinusNumberValue(factor2) || 1;
    const constant1 = value1 * getCombinedSignOfTopLevelTerm(node1);
    const constant2 = value2 * getCombinedSignOfTopLevelTerm(node2);
    const newSum = constant1 + constant2;
    const isPositive1 = constant1 > 0;
    const isPositive2 = constant2 > 0;
    if (newSum === 0) {
        // The terms cancel completely.
        removeNode(node1);
        removeNode(node2);
    } else if (isPositive1 === isPositive2) {
        adjustFactor(node1, factor1, newSum);
        removeNode(node2);
    } else {
        // Opposite signs: keep the term whose sign matches the sum.
        const positiveNode = isPositive1 ? node1 : node2;
        const negativeNode = isPositive1 ? node2 : node1;
        const positiveFactor = isPositive1 ? factor1 : factor2;
        const negativeFactor = isPositive1 ? factor2 : factor1;
        if (newSum > 0) {
            adjustFactor(positiveNode, positiveFactor, newSum);
            removeNode(negativeNode);
        }
        else {
            adjustFactor(negativeNode, negativeFactor, newSum);
            removeNode(positiveNode);
        }
    }
}
// Writes the merged constant into a product term's leading numeric factor.
// The term's sign is carried by its position/unary minus, so the plain
// number branch stores |constant|.
function adjustFactor(node, factor, constant) {
    if (isNumber(factor)) {
        factor.value = '' + Math.abs(constant);
        return;
    }
    if (isUnaryMinus(factor)) {
        adjustConstant(factor.content[0], constant);
        return;
    }
    // No numeric factor yet: prepend one, e.g. x -> 3*x.
    replaceNode(node, makeNode('*', [{ value: '' + constant }, node]));
}
// True when two product terms are structurally equal once any leading
// numeric constant (or unary-minus number) is removed from each, so that
// e.g. 2*x*y and 3*x*y can be merged. Works on clones; inputs are untouched.
function productsExceptFromFirstConstantsAreEqual(node1input, node2input) {
    const node1 = cloneNode(node1input);
    const node2 = cloneNode(node2input);
    // The wrappers give the clones a parent, which removeNode requires.
    const wrapper1 = createWrapperEquation(node1);
    const wrapper2 = createWrapperEquation(node2);
    const firstFactor1 = getFirstFactorInProduct(node1);
    const firstFactor2 = getFirstFactorInProduct(node2);
    const value1 = numberOrUnaryMinusNumberValue(firstFactor1);
    const value2 = numberOrUnaryMinusNumberValue(firstFactor2);
    if (value1 !== null) removeNode(firstFactor1);
    if (value2 !== null) removeNode(firstFactor2);
    return nodesAreEqual(wrapper1, wrapper2);
}
function numberOrUnaryMinusNumberValue(node) {
    // Numeric value of a number or a negated number node; null otherwise.
    if (isNumber(node)) return parseInt(node.value);
    const isNegatedNumber = isUnaryMinus(node) && isNumber(node.content[0]);
    if (isNegatedNumber) return -parseInt(node.content[0].value);
    return null;
}
function doSimplifications(node) {
    // Run each cleanup pass until it finds nothing more to rewrite.
    const passes = [
        replaceProductsOfOne,
        removeUnariesInUnaries,
        removeUnariesInSecondPositionSubtraction,
        replaceDivideByOne,
        removeTermsZero,
    ];
    for (const pass of passes) {
        while (pass(node));
    }
}
// Removes one "0" term (x+0, 0-x, -0, ...) per call; returns true when a
// rewrite happened so callers can loop.
function removeTermsZero(node) {
    if (isNumber(node) && node.value === '0' && '+-'.includes(parentOperator(node))) {
        if (isUnaryMinus(node.parent)) {
            // -0: drop the whole unary minus.
            removeNode(node.parent);
        } else if (indexWithParent(node) === 0 && parentOperator(node) === '-') {
            // 0-x: dropping the 0 leaves a unary minus, i.e. -x.
            removeNode(node);
        } else {
            // x+0, 0+x, x-0: the sibling replaces the sum/difference.
            replaceNode(node.parent, siblingNode(node));
        }
        return true;
    }
    if (node.value !== undefined) return false;
    // Depth-first search; stop at the first rewrite.
    if (removeTermsZero(node.content[0])) return true;
    if (node.content.length > 1 && removeTermsZero(node.content[1])) return true;
    return false;
}
// Rewrites one x/1 occurrence per call to just x; returns true when a
// rewrite happened so callers can loop.
function replaceDivideByOne(node) {
    const isDenominator = parentOperator(node) === '/' && indexWithParent(node) === 1;
    if (isDenominator && node.value === '1') {
        const fraction = node.parent;
        replaceNode(fraction, fraction.content[0]);
        return true;
    }
    if (node.value !== undefined) return false;
    if (replaceDivideByOne(node.content[0])) return true;
    if (node.content.length > 1 && replaceDivideByOne(node.content[1])) return true;
    return false;
}
// Rewrites one 1*x or x*1 occurrence per call to just the other factor;
// returns true when a rewrite happened so callers can loop.
// NOTE(review): a ±1 factor is dropped including its sign — callers appear
// to strip unary minuses first (moveTermToOtherSide does); verify others.
function replaceProductsOfOne(node) {
    if (node.value !== undefined) return false;
    if (node.operator !== '*') {
        if (replaceProductsOfOne(node.content[0])) return true;
        if (node.content.length > 1 && replaceProductsOfOne(node.content[1])) return true;
        return false;
    }
    const value1 = numberOrUnaryMinusNumberValue(node.content[0]);
    const value2 = numberOrUnaryMinusNumberValue(node.content[1]);
    const isOneOrMinus1 = Math.abs(value1) === 1;
    const isOneOrMinus2 = Math.abs(value2) === 1;
    // Only recurse into factors that are not themselves ±1.
    if (!isOneOrMinus1 && replaceProductsOfOne(node.content[0])) return true;
    if (!isOneOrMinus2 && replaceProductsOfOne(node.content[1])) return true;
    if (isOneOrMinus1 || isOneOrMinus2) {
        replaceNode(node, node.content[isOneOrMinus1 ? 1 : 0]);
        return true;
    }
    return false;
}
// Rewrites one a-(-b) occurrence per call to a+b; returns true when a
// rewrite happened so callers can loop.
// NOTE(review): the moved child's parent pointer is not updated here —
// callers appear to rely on a later addParentAndId/re-parse; verify.
function removeUnariesInSecondPositionSubtraction(node) {
    if (node.value !== undefined) return false;
    if (node.operator === '-' && node.content.length == 2 && isUnaryMinus(node.content[1])) {
        node.operator = '+';
        node.content[1] = node.content[1].content[0];
        return true;
    }
    if (removeUnariesInSecondPositionSubtraction(node.content[0])) return true;
    if (node.content.length === 2 && removeUnariesInSecondPositionSubtraction(node.content[1])) return true;
    return false;
}
// Rewrites one -(-x) occurrence per call to x; returns true when a rewrite
// happened so callers can loop.
function removeUnariesInUnaries(node) {
    if (node.value !== undefined) return false;
    if (isUnaryMinus(node) && isUnaryMinus(node.content[0])) {
        const newNode = node.content[0].content[0];
        replaceNode(node, newNode);
        // Collapse deeper stacks like -(-(-(-x))) in one go.
        removeUnariesInUnaries(newNode);
        return true;
    }
    if (removeUnariesInUnaries(node.content[0])) return true;
    if (node.content.length === 2 && removeUnariesInUnaries(node.content[1])) return true;
    return false;
}
// Merges two constant terms, taking their position-derived signs into
// account (e.g. the 5 and 3 in "x-5+3" merge to a signed 2).
function mergeConstantAndConstant(selectedNode1, selectedNode2) {
    const constant1 = parseInt(selectedNode1.value) * getCombinedSignOfTopLevelTerm(selectedNode1);
    const constant2 = parseInt(selectedNode2.value) * getCombinedSignOfTopLevelTerm(selectedNode2);
    const newSum = constant1 + constant2;
    const isPositive1 = constant1 > 0;
    const isPositive2 = constant2 > 0;
    if (newSum === 0) {
        // Terms cancel: one disappears, the other becomes 0 (cleaned up later).
        removeNode(selectedNode1);
        replaceNode(selectedNode2, {value: '0'});
    } else if (isPositive1 === isPositive2) {
        // Same sign: keep the first term with the summed magnitude.
        adjustConstant(selectedNode1, newSum);
        removeNode(selectedNode2);
    } else {
        // Opposite signs: keep the term whose sign matches the sum.
        const positiveNode = isPositive1 ? selectedNode1 : selectedNode2;
        const negativeNode = isPositive1 ? selectedNode2 : selectedNode1;
        if (newSum > 0) {
            adjustConstant(positiveNode, newSum);
            removeNode(negativeNode);
        } else {
            adjustConstant(negativeNode, newSum);
            removeNode(positiveNode);
        }
    }
}
function getType(node) {
    // Classifies a term for the merge logic; throws on unknown node shapes.
    if (node.value !== undefined) {
        if (isNumber(node)) return 'constant';
        if (isLetter(node)) return 'letter';
    } else {
        if (isUnaryMinus(node)) return 'unary minus';
        if (isMultiplication(node)) return 'product';
        if (isDivision(node)) return 'division';
    }
    throw "unknown type: " + toString(node);
}
// Writes |newConstant| into the numeric part of a term: a plain number,
// the first constant of a product, or recursively into a fraction's
// numerator / a unary minus's operand. The sign lives elsewhere in the tree.
function adjustConstant(node, newConstant) {
    if (isNumber(node)) {
        node.value = '' + Math.abs(newConstant);
        return;
    }
    if (node.operator === '*') {
        const constantNode = getFirstConstantInProduct(node);
        if (constantNode !== null) constantNode.value = '' + Math.abs(newConstant);
        return;
    }
    if (node.operator === '/' || isUnaryMinus(node)) {
        adjustConstant(node.content[0], newConstant);
        return;
    }
    throw "cannot adjust constant in " + toString(node);
}
function finishWithError(errorMessage) {
    // Shows a user-facing error. The 'dummy' return value lets callers
    // write `return finishWithError(...)` from value-returning paths.
    model.errorMessage = errorMessage;
    resetAndUpdateView();
    return 'dummy';
}
function createWrapperEquation(node) {
    // Wraps a node in a dummy '=' so it gets a parent (removeNode needs one).
    const equation = makeNode('=', [{ value: 1 }, node]);
    return addParentAndId(equation);
}
function getFirstFactorInProduct(product) {
    // Products parse left-associative, so the first factor is the leftmost leaf.
    let current = product;
    while (isMultiplication(current.content[0])) {
        current = current.content[0];
    }
    return current.content[0];
}
// Multiplies the +/- signs on the path from the term up to '=': 1 when the
// term is effectively added, -1 when effectively subtracted.
function getCombinedSignOfTopLevelTerm(node) {
    if (node.parent.operator === '=') return 1;
    // A step contributes -1 only when the node is the right operand of a
    // binary '-' or the child of a unary '-' (content.length === 1).
    const factor = node.parent.operator !== '-'
        || (node.parent.content.length === 2 && node.parent.content[0] === node)
        ? 1
        : -1;
    return factor * getCombinedSignOfTopLevelTerm(node.parent);
}
function getSignFromParent(node) {
    // -1 when the node is subtracted by its parent, +1 otherwise.
    if (isSecondPartOfMinus(node)) return -1;
    return 1;
}
function isSecondPartOfMinus(node) {
    // True when the node is the b in a-b.
    if (parentOperator(node) !== '-') return false;
    return node === node.parent.content[1];
}
// True when the two nodes sit on opposite sides of the '='.
// NOTE(review): relies on lexicographic comparison of index-path strings
// putting every right-side node >= the right side's own path — verify that
// indexesFromNode keeps this property for deep trees.
function nodesAreOnSeparateSides(node1, node2, tree) {
    const firstIndexOnRightSide = indexesFromNode(tree.content[1]);
    const node1Side = indexesFromNode(node1) < firstIndexOnRightSide;
    const node2Side = indexesFromNode(node2) < firstIndexOnRightSide;
    return node1Side !== node2Side;
}
function nodesAreEqual(node1, node2) {
const equalPrimitives = node1.value && node2.value && node1.value === node2.value;
if (equalPrimitives) return true;
return node1.operator === node2.operator
&& node1.content && node2.content
&& nodesAreEqual(node1.content[0], node2.content[0])
&& node1.content.length > 1 && node2.content.length > 1
&& nodesAreEqual(node1.content[1], node2.content[1]);
}
// Detaches a term/factor from the tree while keeping it well-formed: the
// sibling takes its place, or the node's '-' collapses appropriately.
function removeNode(node) {
    const parent = node.parent;
    if (parent.operator === '-' && parent.content.length === 2 && indexWithParent(node) === 0) {
        // Removing a from a-b leaves a unary minus: -b.
        parent.content.shift();
    } else if (isUnaryMinus(node.parent)) {
        // Removing the only child of a unary minus removes the minus too.
        removeNode(node.parent);
    } else {
        replaceNode(parent, siblingNode(node));
    }
}
function siblingNode(node) {
    // The other child of the node's parent.
    return node.parent.content[otherIndex(indexWithParent(node))];
}
function replaceNode(node, newNode) {
    // Splice newNode into node's slot and fix its parent pointer.
    const slot = indexWithParent(node);
    node.parent.content[slot] = newNode;
    newNode.parent = node.parent;
}
function otherIndex(index) {
return index === 1 ? 0 : 1;
}
function indexWithParent(node) {
return node.parent.content[0] === node ? 0 : 1;
}
// Cancels two equal factors, one picked in the numerator and one in the
// denominator, by replacing both with 1 (doSimplifications cleans them up).
function reduceFraction(indexes1, indexes2) {
    const tree = parseMathText(model.mathText);
    const node1 = nodeFromIndexes(indexes1, tree);
    const node2 = nodeFromIndexes(indexes2, tree);
    if (!nodesAreEqual(node1, node2)) {
        return finishWithError('Faktorene er ulike og kan ikke forkortes mot hverandre.');
    }
    replaceNode(node1, { value: '1' });
    replaceNode(node2, { value: '1' });
    addParentAndId(tree);
    doSimplifications(tree);
    model.mathText = toString(tree);
}
function divideBothSides(indexes) {
    // Divides both sides of the equation by the selected factor.
    const tree = parseMathText(model.mathText);
    const divisor = nodeFromIndexes(indexes, tree);
    for (const side of [0, 1]) {
        const fraction = makeNode('/', [tree.content[side], cloneNode(divisor)]);
        replaceNode(tree.content[side], fraction);
    }
    model.mathText = toString(tree);
}
function resetAndUpdateView() {
    // Clear any half-finished operation before re-rendering.
    model.onGoingMathOperation = null;
    updateView();
}
function cloneNode(node) {
    // Deep copy of a parse tree; parent links are intentionally not copied.
    if (node.value != undefined) return { value: node.value };
    const clonedContent = node.content.map(cloneNode);
    return { operator: node.operator, content: clonedContent };
}
// Global application state; updateView() re-renders everything from here.
// NOTE: updateView relies on key order (Object.keys(model)[2] must be
// 'ownExercise'), so do not reorder these properties.
const model = {
    level: 1,            // current difficulty level (1-6)
    mathText: 'x=1+2',   // the equation currently being worked on
    ownExercise: '',     // text typed into the custom-exercise input
    showVideos: true,    // whether the help video is visible
    // Undo/redo: items holds every equation state, index the current one.
    history: {
        items: [],
        index: 0,
    },
    // User-facing descriptions for each selection step.
    steps: {
        selectOneTerm: 'Velg et ledd.',
        selectFactorInNumerator: 'Velg en faktor i telleren.',
        selectFactorInDenominator: 'Velg en faktor i nevneren.',
        selectTopLevelFactor: 'Velg et ledd eller en faktor i et ledd.',
        selectNumber: 'Velg et tall.',
    },
    mathOperations: {},  // registered below, keyed by function name
    // One help video per level (index 0 unused).
    youTubeVideoIds: [
        '',
        '4yY3GJ2VJR8',
        'ppjutK7iwu8',
        'kPK-rbW7Jy8',
        'zAbQeidbWdc',
        'rgdP8DK9cQ8',
        'QejFqIPpos4',
    ],
};
// Operation registry. Each entry is keyed by the function's own name
// (doMath resolves the key back to the function): the selection steps it
// needs, a legacy text icon, an SVG icon, a description, and the level
// range where it is offered.

// Merge two like terms (available from level 1).
model.mathOperations[mergeTerms.name] = {
    steps: ['selectOneTerm', 'selectOneTerm'],
    icon: '∑',//'⭨⭩\n•',
    svg: {
        path: 'M 3.34 4.7 h -3.2577 v -0.5264 l 1.8548 -1.8411 l -1.8273 -1.7391 v -0.5181 h 3.1226 v 0.4851 h -2.4557 l 1.7859 1.6564 v 0.1984 l -1.8355 1.7997 h 2.6128 z',
        viewBox: {
            width: 3.34,
            height: 4.7,
        }
    },
    description: 'Slå sammen ledd',
    levels: {
        first: 1,
    }
};
// Explicitly subtract a term on both sides (levels 2-3 only; replaced by
// moveTermToOtherSide at level 4).
model.mathOperations[subtractTermOnBothSides.name] = {
    steps: ['selectOneTerm'],
    icon: '|−',
    svg: {
        path: 'M 0.5796 5.4688 h -0.4796 v -5.3688 h 0.4796 z m 3 -2.6 h -1.7198 v -0.4989 h 1.7198 z',
        viewBox: {
            width: 3.5,
            height: 5.5,
        }
    },
    description: 'Trekke fra på begge sider av ligningen',
    levels: {
        first: 2,
        last: 3,
    }
};
// Move a term to the other side of the '=' (from level 4).
model.mathOperations[moveTermToOtherSide.name] = {
    steps: ['selectOneTerm'],
    icon: '↷\n=',
    svg: {
        path: 'M 0.1 5.8787 q 0 -2.3974 1.6946 -4.0841 q 1.6946 -1.6946 4.0841 -1.6946 q 1.7805 0 3.1783 0.9371 q 1.3978 0.9293 2.1631 2.6316 l 1.1323 -1.3041 l -0.5076 3.8186 l -3.0533 -2.3427 q 1.3822 0.2968 1.718 0.125 l -0.2108 -0.4451 q -1.4915 -2.6551 -4.4121 -2.6551 q -2.0772 0 -3.5453 1.4681 q -1.4681 1.4681 -1.4681 3.5453 z m 8.7 2 h -6.1288 v -0.8268 h 6.1288 z m 0 2.1704 h -6.1288 v -0.8268 h 6.1288 z',
        viewBox: {
            width: 12.5,
            height: 10.5,
        }
    },
    description: 'Flytte ledd til den andre siden av ligningen',
    levels: {
        first: 4,
    }
};
// Divide both sides by a factor (from level 5).
model.mathOperations[divideBothSides.name] = {
    steps: ['selectTopLevelFactor'],
    icon: '|÷',
    svg: {
        path: 'M 0.5796 5.4688 h -0.4796 v -5.3688 h 0.4796 z m 3 -2.6 h -1.7198 v -0.4989 h 1.7198 z m -0.6 -0.8 h -0.5 v -0.4989 h 0.5 z m 0 1.6 h -0.5 v -0.4989 h 0.5 z',
        viewBox: {
            width: 3.5,
            height: 5.5,
        }
    },
    description: 'Dele begge sider av ligningen',
    levels: {
        first: 5,
    }
};
// Cancel equal factors in a fraction (from level 5).
model.mathOperations[reduceFraction.name] = {
    steps: ['selectFactorInNumerator', 'selectFactorInDenominator'],
    icon: '/\n‒\n/',
    svg: {
        path: 'M 10 1 l -3.7052 9.6118 h -0.894 l 3.6897 -9.6118 z m 3 12 h -12 v -0.8268 h 12 z m -3 1.3 l -3.7052 9.6118 h -0.894 l 3.6897 -9.6118 z',
        viewBox: {
            width: 14,
            height: 24,
        }
    },
    description: 'Forkorte brøk',
    levels: {
        first: 5,
    }
};
// Prime-factorize a selected number (from level 6).
model.mathOperations[primeFactorize.name] = {
    steps: ['selectNumber'],
    icon: '□\n⭩⭨\n□×□',
    svg: {
        path: 'm 6.2 0.1 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m -4 10.5 v -2.7027 l 1.08 1.08 l 4.0566 -4.0566 l 0.5426 0.5426 l -4.0566 4.0566 l 1.08 1.08 z m 12 0 v -2.7027 l -1.08 1.08 l -4.0566 -4.0566 l -0.5426 0.5426 l 4.0566 4.0566 l -1.08 1.08 z m -14.4 0.4 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m 11.5 -0.376 h 4.8782 v 4.8782 h -4.8782 z m 0.3927 0.3927 v 4.0928 h 4.0928 v -4.0928 z m -1.08 4.2 l -0.5736 0.5736 l -2.2531 -2.2738 l -2.2531 2.2738 l -0.5736 -0.5736 l 2.2738 -2.2531 l -2.2738 -2.2531 l 0.5736 -0.5736 l 2.2531 2.2738 l 2.2531 -2.2738 l 0.5736 0.5736 l -2.2738 2.2531 z',
        viewBox: {
            width: 17,
            height: 17,
        }
    },
    description: 'Primtallsfaktorisere',
    levels: {
        first: 6,
    }
};
// Undo (always available: first level 0, no steps, runs immediately).
model.mathOperations[undo.name] = {
    steps: [],
    icon: '^⮪',
    svg: {
        path: 'm 0.1 2.2 l 2.0826 -2.0877 v 1.0697 h 2.7182 q 1.4624 0 2.0671 0.4651 q 0.4651 0.3514 0.6201 0.8268 q 0.1602 0.4703 0.1602 1.5038 v 1.0284 h -0.2532 v -0.1395 q 0 -0.9302 -0.4341 -1.2867 q -0.4289 -0.3617 -1.5451 -0.3617 h -3.3331 v 1.0697 z',
        viewBox: {
            width: 8,
            height: 5,
        }
    },
    description: 'Angre',
    levels: {
        first: 0,
    }
};
// Redo (always available, runs immediately).
model.mathOperations[redo.name] = {
    steps: [],
    icon: '^⮫',
    svg: {
        path: 'm 7.8 2.2 l -2.0877 2.0877 v -1.0697 h -3.328 q -1.1214 0 -1.5503 0.3617 q -0.4289 0.3566 -0.4289 1.2867 v 0.1395 h -0.2532 v -1.0284 q 0 -1.0335 0.155 -1.5038 q 0.1602 -0.4754 0.6201 -0.8268 q 0.6046 -0.4651 2.0671 -0.4651 h 2.7182 v -1.0697 z',
        viewBox: {
            width: 8,
            height: 5,
        }
    },
    description: 'Gjøre omigjen',
    levels: {
        first: 0,
    }
};
// Show the first exercise on startup.
newExercise();
// Re-renders the whole app into #app from the model, then scrolls the
// pre-history pane to its newest entry.
// Bug fixes: (1) the custom-exercise input's oninput used
// Object.keys({x: model})[0], which is the string 'x', so the handler
// assigned to a nonexistent global `x` instead of `model`; the {model}
// shorthand yields 'model' as intended. (2) The contact <button> was
// closed with </div> instead of </button>.
function updateView() {
    const videoHtml = `<iframe width="560" height="315" src="https://www.youtube.com/embed/${model.youTubeVideoIds[model.level]}" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>`;
    document.getElementById('app').innerHTML = `
    <div class="mainPart history historyPre">
        ${createHistoryHtml(true)}
    </div>
    <div id="mathContent" class="math mainPart">
        ${createMathText(model.mathText, getStep())}
    </div>
    <div class="mainPart history">
        ${createHistoryHtml(false)}
    </div>
    <div class="mainPart panel">
        <div id="txt">${getText()}</div>
        <div id="menuBar">
            ${createMenuHtml(model.mathOperations, model.onGoingMathOperation)}
        </div>
    </div>
    <div class="mainPart">
        <div>
            <button class="video"
                onclick="${toggleVideos.name}()">
                ${model.showVideos ? 'Skjul' : 'Vis' }
                video
            </button>
        </div>
        ${model.showVideos ? videoHtml : ''}
    </div>
    <div class="mainPart panel footer">
        <div class="levels" >
            <button class="exercise" onclick="${newExercise.name}()">Ny nivå ${model.level}-oppgave</button>
            <div style="width: 40px"></div>
            <input type="text" oninput="${Object.keys({model})[0]}.${Object.keys(model)[2]}=this.value"/>
            <button class="exercise" onclick="${newCustomExercise.name}()">Ny egen oppgave</button>
        </div>
        <div class="levels">
            Nivåer:
            ${createLevelsMenuHtml()}
            <button onclick="window.open('https://m.me/playterje')" class="level kontakt">Kontakt PlayTerje</button>
        </div>
    </div>
    `;
    // Keep the newest pre-history entry in view.
    const el = document.getElementsByClassName('historyPre')[0];
    el.scrollTop = el.scrollHeight;
}
function toggleVideos() {
    // Flip video visibility and re-render.
    const currentlyVisible = model.showVideos;
    model.showVideos = !currentlyVisible;
    updateView();
}
// Renders the equations before (isPreHistory) or after the current position
// in the undo history.
function createHistoryHtml(isPreHistory) {
    const history = model.history;
    const index = history.index;
    const allItems = history.items;
    const limits = isPreHistory ? [0, index] : [index + 1, allItems.length];
    const items = model.history.items.slice(...limits);
    return items.map(mathText => `
        <div id="mathContent" class="math">
            ${createMathText(mathText, null)}
        </div>
    `).join('');
}
function newExercise() {
    // Generate a fresh exercise for the current level.
    const generateExercise = levelExerciseFunctions[model.level];
    newExerciseImpl(generateExercise());
}
// Starts the exercise typed by the user, after checking that it parses.
// Improvements: the parse result is actually inspected — parse() returns
// null for trailing garbage instead of throwing, which the old version
// silently accepted — and the previously unused local is now meaningful.
function newCustomExercise() {
    model.errorMessage = '';
    let result = null;
    try {
        result = parseMathText(model.ownExercise);
    } catch (error) {
        result = null;
    }
    if (result === null || result === undefined) {
        model.errorMessage = `Kan ikke tolke uttrykket <tt>${model.ownExercise}</tt>`;
        updateView();
        return;
    }
    newExerciseImpl(model.ownExercise);
}
function newExerciseImpl(exercise) {
    // Install the exercise and restart the undo history from it.
    model.errorMessage = null;
    model.mathText = exercise;
    const items = model.history.items;
    items.length = 0;
    items.push(exercise);
    model.history.index = 0;
    updateView();
}
// Level buttons 1-6; the current level is marked with 'selectedLevel'.
function createLevelsMenuHtml() {
    return range(1, 7).map(level => `
        <button class="level ${level === model.level ? 'selectedLevel' : ''}"
            onclick="${selectLevel.name}(${level})">
            ${level}
        </button>
    `).join('');
}
function selectLevel(level) {
    // Switch level; menu and video follow via updateView.
    model.level = level;
    updateView();
}
function getStep() {
    // The ongoing operation's current selection step, or null.
    const operation = model.onGoingMathOperation;
    if (!operation) return null;
    return operation.step;
}
// Builds the instruction panel: an error (when set), otherwise the ongoing
// operation's description and step list, otherwise a generic prompt.
function getText() {
    const message = createMessage();
    // const error = `<div class="error">${model.errorMessage || ''}</div>`;
    if (model.errorMessage) {
        const description = createDescriptionHtml('error', model.errorMessage, 'error');
        return createText(description, message);
    }
    const operation = model.onGoingMathOperation;
    if (!operation) return createText('', message);
    const step = operation ? operation.step : null;
    if (!step) return createText();
    const operationName = operation.name;
    const mathOperation = model.mathOperations[operationName];
    const description = createDescriptionHtml(operationName, mathOperation.description);
    const length = operation.arguments ? operation.arguments.length : 0;
    // Completed and future steps are dimmed; the current one is emphasized.
    const stepsHtml = operation.steps.map((step, i) => `
        <li class="${i == length ? '' : 'passive'}">${step}</li>
    `).join('');
    return createText(description, `<ol>${stepsHtml}</ol>`);
}
// Renders an operation's icon button next to its textual description.
// `extraCssClass` (e.g. 'error') overrides the styling of both parts.
function createDescriptionHtml(operationName, text, extraCssClass) {
    const buttonClass = extraCssClass || '';
    const textClass = extraCssClass || 'function';
    return `
        <div class="selectedFunction">
            <button class="display ${buttonClass}" disabled>
                ${getIcon(operationName)}
            </button>
            <div class="${textClass}">
                ${text}
            </div>
        </div>
    `;
}
// Chooses the prompt under the exercise: solved notice, nothing (level 0),
// or a request to pick the next operation.
function createMessage() {
    if (model.mathText && isEquationSolved(model.mathText)) return 'Ligningen er løst.';
    if (model.level === 0) return '';
    return 'Velg operasjon:';
}
// True when the equation is in solved form: a bare letter on one side and
// a (possibly negated) number on the other — or a letter on one side and
// an irreducible numeric fraction on the other.
function isEquationSolved(mathText) {
    const node = parseMathText(mathText);
    // content[0] / content[1] are the two sides of the '=' root.
    const letterOnOneSide = isLetter(node.content[0]) || isLetter(node.content[1]);
    const numberOnOneSide = isNumber(node.content[0]) || isNumber(node.content[1])
        || isUnaryNumber(node.content[0]) || isUnaryNumber(node.content[1]);
    if (letterOnOneSide && numberOnOneSide) return true;
    if (!letterOnOneSide) return false;
    // Otherwise the non-letter side must be a fraction of two plain numbers.
    const fraction = node.content[0].operator === '/' ? node.content[0]
        : node.content[1].operator === '/' ? node.content[1] : null;
    if (fraction === null) return false;
    if (!isNumber(fraction.content[0]) || !isNumber(fraction.content[1])) return false;
    const number1 = parseInt(fraction.content[0].value);
    const number2 = parseInt(fraction.content[1].value);
    // No '*' in the prime factorization string — presumably meaning each
    // value is prime (or 1), so the fraction cannot be reduced further.
    // TODO confirm against primeFactorizeImpl, which is defined elsewhere.
    return !primeFactorizeImpl(number1).includes('*')
        && !primeFactorizeImpl(number2).includes('*');
}
// True for a unary minus wrapped directly around a number literal, e.g. (-2).
function isUnaryNumber(node) {
    if (!isUnaryMinus(node)) return false;
    return isNumber(node.content[0]);
}
// Two-row HTML layout for the info area: the description on top, the
// current step (italic) below. The fallback keeps the row height stable
// when a part is missing.
function createText(fn, step) {
    return `
    <div>${fn || ' '}</div>
    <div class="step"><i>${step || ' '}</i></div>
    `;
}
<file_sep>/*
QUnit.test("extract positive constant x=1+2", function (assert) {
// =([0]x,+([10]1,[11]2))
const tree = parseMathText('x=1+2');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '2');
assert.equal(extraction.theRest, null);
});
QUnit.test("extract negative constant", function (assert) {
// =([0]x,-([10]1,[11]2))
const tree = parseMathText('x=1-2');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-2');
assert.equal(extraction.theRest, null);
});
QUnit.test("extract negative unary constant", function (assert) {
// []=([0]x,[1]+([10]1,[11]-([110]2)))
const tree = parseMathText('x=1+(-2)');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-2');
assert.equal(extraction.theRest, null);
});
QUnit.test("extract negative unary product", function (assert) {
// []=([0]x,[1]+([10]1,[11]*([110]-([1100]2),[111]3)))
const tree = parseMathText('x=1+(-2*3)');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-2');
assert.equal(toString(extraction.theRest), '3');
});
QUnit.test("extract negative unary product - 2", function (assert) {
// []=([0]x,[1]+([10]1,[11]-([110]*([1100]2,[1101]3))))
const tree = parseMathText('x=1+(-(2*3))');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-2');
assert.equal(toString(extraction.theRest), '3');
});
QUnit.test("extract negative unary product - 3", function (assert) {
// []=([0]x,[1]+([10]1,[11]-([110]*([1100]2,[1101]3))))
const tree = parseMathText('x=1+(-(x*3))');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-3');
assert.equal(toString(extraction.theRest), 'x');
});
QUnit.test("extract product", function (assert) {
// []=([0]x,[1]+([10]1,[11]*([110]2,[111]3)))
const tree = parseMathText('x=1+2*3');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '2');
assert.equal(toString(extraction.theRest), '3');
});
QUnit.test("extract constant in x-term", function (assert) {
// []=([0]x,[1]+([10]1,[11]*([110]2,[111]x)))
const tree = parseMathText('x=1+2*x');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '2');
assert.equal(extraction.theRest.value, 'x');
});
QUnit.test("extract negative constant in x-term", function (assert) {
// []=([0]x,[1]-([10]1,[11]*([110]2,[111]x)))
const tree = parseMathText('x=1-2*x');
const node = nodeFromIndexes('11', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-2');
assert.equal(extraction.theRest.value, 'x');
});
QUnit.test("extract unary minus constant in x-term", function (assert) {
// []=([0]x,[1]+([10]*([100]-([1000]3),[101]x),[11]*([110]5,[111]x)))
const tree = parseMathText('x=-3*x+5*x');
const node = nodeFromIndexes('10', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '-3');
assert.equal(extraction.theRest.value, 'x');
});
QUnit.test("extract 1. in minus", function (assert) {
// []=([0]x,[1]-([10]1,[11]2))
const tree = parseMathText('x=1-2');
const node = nodeFromIndexes('10', tree);
const extraction = extractConstant(node);
assert.equal(extraction.constant, '1');
assert.equal(extraction.theRest, null);
});
*/<file_sep>/*
EXPRESSION --> TERM {( "+" | "-" ) TERM}
TERM --> FACTOR {( "*" | "/" ) FACTOR}
FACTOR --> P ["^" FACTOR]
P --> v | "(" EXPRESSION ")" | "-" FACTOR
*/
//parse(['x']);
// parse(['x','+','y']);
// console.log(parse(['x', '+', 'y', '*', '2']));
//console.log(parse(['x', '+', '1', '/', '(', 'y', '+', '2', ')']));
//setTimeout(() => show('2+x-1'), 100);
// Parses a math string into a tree. An '=' splits the text into two
// recursively parsed sides under an '=' root node, which then also gets
// parent links attached.
function parseMathText(mathText) {
    const equalsAt = mathText.indexOf('=');
    if (equalsAt === -1) {
        return parse(lex(mathText));
    }
    const leftTree = parseMathText(mathText.slice(0, equalsAt));
    const rightTree = parseMathText(mathText.slice(equalsAt + 1));
    const equation = makeNode('=', [leftTree, rightTree]);
    return addParentAndId(equation, null);
}
// Renders a parse tree back to text, adding parentheses only where the
// printed form would otherwise be ambiguous.
function toString(node) {
    if (node.value != undefined) {
        return node.value;
    }
    if (isUnaryMinus(node)) {
        // A unary minus that starts the expression needs no parentheses
        // ('-2'); elsewhere it is written as '(-2)'.
        if (isFirstTerm(node)) return '-' + toString(node.content[0]);
        else return '(-' + toString(node.content[0]) + ')';
    }
    if (node.operator === '/') {
        // Both operands of a division are parenthesized unconditionally.
        return '(' + toString(node.content[0]) + ')' + node.operator + '(' + toString(node.content[1]) + ')';
    }
    if (node.operator === '-' && '+-'.includes(node.content[1].operator)) {
        // a-(b+c): parentheses keep the subtraction from rebinding.
        return toString(node.content[0]) + node.operator + '(' + toString(node.content[1]) + ')';
    }
    return toString(node.content[0]) + node.operator + toString(node.content[1]);
}
// True when `node` is the leftmost term of its expression, so a unary
// minus on it can be printed without parentheses. Walks up through
// additive parents, requiring first-child position at every level.
// (parentOperator is defined elsewhere in this file.)
function isFirstTerm(node) {
    const isFirstWithParent = node.parent && node === node.parent.content[0];
    if (node.parent && '+-'.includes(parentOperator(node))) return isFirstWithParent && isFirstTerm(node.parent);
    return true;
}
function addParentAndId(node, parent) {
if (!node) return;
node.parent = parent;
if (!node.content) return;
for (var child of node.content) {
addParentAndId(child, node);
}
return node;
}
function lex(mathText) {
const isDigit = char => char >= '0' && char <= '9';
const lastCharacter = text => text.length === 0 ? null : text[text.length - 1];
const addSeparator = (char, text) => text.length > 0 && (!isDigit(char) || !isDigit(lastCharacter(text)));
const separator = (char, text) => addSeparator(char, text) ? ',' : '';
const handleOneChar = (total, current) => total + separator(current, total) + current;
const chars = mathText.split('');
const charsWithSeparators = chars.reduce(handleOneChar, '');
return charsWithSeparators.split(',');
}
// Parses a complete token list; returns the tree, or null when tokens
// are left over (trailing garbage).
function parse(tokens) {
    const state = parseExpression(tokens);
    if (state.tokens.length > 0) return null;
    return state.tree;
}
// EXPRESSION --> TERM { ('+' | '-') TERM }
function parseExpression(tokens) {
    return parseMultipart(tokens, '+-', parseTerm);
}
// TERM --> FACTOR { ('*' | '/') FACTOR }
function parseTerm(tokens) {
    return parseMultipart(tokens, '*/', parseFactor);
}
// Left-associative chain of `parseFn` results joined by any operator in
// `operators`: "a op b op c" becomes ((a op b) op c).
function parseMultipart(tokens, operators, parseFn) {
    let state = parseFn(tokens);
    while (operators.includes(state.tokens[0])) {
        const operator = state.tokens.shift();
        const rightState = parseFn(state.tokens);
        const combined = makeNode(operator, [state.tree, rightState.tree]);
        state = makeState(rightState.tokens, combined);
    }
    return state;
}
// FACTOR --> P ['^' FACTOR]; the recursion makes '^' right-associative.
function parseFactor(tokens) {
    const baseState = parseParenthesisValueOrUnary(tokens);
    const rest = baseState.tokens;
    if (rest[0] !== '^') return baseState;
    rest.shift();
    const exponentState = parseFactor(rest);
    const powerNode = makeNode('^', [baseState.tree, exponentState.tree]);
    return makeState(exponentState.tokens, powerNode);
}
function parseParenthesisValueOrUnary(tokens) {
if (isNumberOrLetter(tokens[0])) {
const value = tokens.shift();
return makeState(tokens, makeLeaf(value));
} else if (tokens[0] === '(') {
tokens.shift();
const state = parseExpression(tokens);
if (tokens.shift() !== ')') console.error('expected )');
return state;
} else if (tokens[0] === '-') {
tokens.shift();
const state = parseFactor(tokens);
return makeState(tokens, makeNode('-', [state.tree]));
} else {
console.error('Error in parseParenthesisValueOrUnary. Tokens: ', tokens)
}
}
function isNumberOrLetter(text) {
return text[0] >= '0' && text[0] <= '9' || text[0] >= 'a' && text[0] <= 'z';
}
function makeNode(operator, content) {
return { operator, content };
}
function makeLeaf(value) {
return { value };
}
// Parser state: the tokens still to consume plus the tree built so far.
function makeState(tokens, tree) {
    return { tokens, tree };
}<file_sep>
QUnit.test("remove node - first in minus", function (assert) {
    // Removing the minuend of '1-2' should turn the '-' into a unary minus
    // on the remaining operand: 'x=-2'.
    const tree = parseMathText('x=1-2');
    const rightSide = tree.content[1];
    removeNode(rightSide.content[0]);
    const newMathText = toString(tree);
    assert.equal(newMathText, 'x=-2');
});
QUnit.test("remove node - second in minus", function (assert) {
    // Removing the subtrahend of '1-2' should leave just the minuend: 'x=1'.
    const tree = parseMathText('x=1-2');
    const rightSide = tree.content[1];
    removeNode(rightSide.content[1]);
    const newMathText = toString(tree);
    assert.equal(newMathText, 'x=1');
});
<file_sep>QUnit.test("subtract term - x-2=-3", function (assert) {
    // NOTE(review): the test name mentions "subtract term - x-2=-3" but the
    // body only asserts the root operator of 'x=1/5' — name and assertion
    // look out of sync; confirm the intended behavior.
    const node = parseMathText('x=1/5');
    assert.equal(node.operator, '=');
});
|
c6336aeb69c02f1b17c23e47e42a03c2a029ee4b
|
[
"JavaScript"
] | 11 |
JavaScript
|
terjekol/FunkyTerje
|
3b272cc2be3dbe965031f8e3a1f807fba75db0d3
|
05f9a336ea6bb4fae5a9b060697bd2191849f1aa
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Data.SqlClient;
using QuanLiKhachSan.DTO;
using QuanLiKhachSan.DAO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Form that shows the logged-in employee's account information and lets
    /// the employee update personal details and change the password.
    /// </summary>
    public partial class fThongTinTaiKhoan : Form
    {
        EC_NHANVIEN ec = new EC_NHANVIEN();
        EC_TAIKHOAN ectk = new EC_TAIKHOAN();

        // Employee id and role id of the current user; filled by hienthi().
        String MANV;
        String MACV;

        public fThongTinTaiKhoan()
        {
            // NOTE(review): the control collection is empty before
            // InitializeComponent(), so this Clear() looks redundant —
            // confirm before removing it.
            this.Controls.Clear();
            InitializeComponent();
        }

        private void txtReNewPassWord_TextChanged(object sender, EventArgs e)
        {
        }

        /// <summary>Clears the three password text boxes.</summary>
        public void setnull()
        {
            txtpass.Text = "";
            txtNewPassWord.Text = "";
            txtReNewPassWord.Text = "";
        }

        /// <summary>
        /// Loads the current user's employee record and account into the form.
        /// </summary>
        public void hienthi()
        {
            // NOTE(review): the account name is concatenated into the SQL
            // text; prefer the parameterized ExecuteQuery overload.
            string query = "SELECT NHANVIEN.MACHUCVU,NHANVIEN.MANHANVIEN,NHANVIEN.TENNHANVIEN, NHANVIEN.GIOITINH, NHANVIEN.NGAYSINH, NHANVIEN.DIACHI, TAIKHOAN.TENTAIKHOAN, TAIKHOAN.PASS, NHANVIEN.SODIENTHOAI FROM NHANVIEN INNER JOIN TAIKHOAN ON NHANVIEN.MANHANVIEN = TAIKHOAN.MANHANVIEN WHERE TAIKHOAN.TENTAIKHOAN='" + fLogin.TaiKhoan + "'";
            DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
            MACV = dt.Rows[0]["MACHUCVU"].ToString().Trim();
            MANV = dt.Rows[0]["MANHANVIEN"].ToString().Trim();
            txtTaiKhoan.Text = dt.Rows[0]["TENTAIKHOAN"].ToString().Trim();
            txtTenNV.Text = dt.Rows[0]["TENNHANVIEN"].ToString().Trim();
            txtDiachi.Text = dt.Rows[0]["DIACHI"].ToString().Trim();
            cbGioitinh.Text = dt.Rows[0]["GIOITINH"].ToString().Trim();
            dtNgaysinh.Text = dt.Rows[0]["NGAYSINH"].ToString().Trim();
            txtSDT.Text = dt.Rows[0]["SODIENTHOAI"].ToString().Trim();
        }

        /// <summary>
        /// Verifies the old password and the new-password confirmation, then
        /// saves the employee record and the account with the new password.
        /// </summary>
        private void btnCapNhap_Click(object sender, EventArgs e)
        {
            if (txtpass.Text != fLogin.MatKhau || txtNewPassWord.Text != txtReNewPassWord.Text)
            {
                MessageBox.Show("Nhập lại mật khẩu!!!");
                return;
            }
            else
            {
                try
                {
                    ec.MaChucVu = MACV;
                    ec.TenNhanVien = txtTenNV.Text;
                    // Fixed: the address was read from txtTenNV (the name
                    // box), silently overwriting the address with the name.
                    ec.DiaChi = txtDiachi.Text;
                    ec.NgaySinh = dtNgaysinh.Value;
                    ec.GioiTinh = cbGioitinh.Text;
                    ec.SDT = txtSDT.Text;
                    ec.MaNhanVien = MANV;
                    ectk.MaNhanVien = MANV;
                    ectk.TenTaiKhoan = txtTaiKhoan.Text;
                    ectk.MatKhau = txtNewPassWord.Text;
                    fLogin.MatKhau = txtNewPassWord.Text;
                    NhanVienDAO.Instances.SuaTK(ectk);
                    NhanVienDAO.Instances.SuaNhanVien(ec);
                    MessageBox.Show("Thực hiện thành công!!!");
                }
                catch
                {
                    MessageBox.Show("Lỗi!!!");
                    return;
                }
            }
            setnull();
            hienthi();
        }

        // Shared input filter: rejects everything except backspace,
        // separators, letters and digits (previously duplicated in six
        // KeyPress handlers).
        private static bool LaKyTuKhongHopLe(char c)
        {
            return c != (char)Keys.Back && !char.IsSeparator(c) && !char.IsLetter(c) && !char.IsDigit(c);
        }

        private void txtSDT_KeyPress(object sender, KeyPressEventArgs e)
        {
            // Phone number: digits and control keys only.
            if (!Char.IsDigit(e.KeyChar) && !Char.IsControl(e.KeyChar))
                e.Handled = true;
        }

        private void txtDiachi_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void cbGioitinh_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void txtTenNV_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void txtpass_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void txtNewPassWord_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void txtReNewPassWord_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = LaKyTuKhongHopLe(e.KeyChar);
        }

        private void fThongTinTaiKhoan_Load(object sender, EventArgs e)
        {
            hienthi();
            setnull();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Form listing customer debts (unpaid checkouts) with a live search
    /// box filtering by identity-card number.
    /// </summary>
    public partial class fCongNoTraPhong : Form
    {
        public fCongNoTraPhong()
        {
            InitializeComponent();
            LoadCongNo();
        }

        /// <summary>Fills the grid with the complete debt list.</summary>
        public void LoadCongNo()
        {
            DataTable result = DataProvider.Instance.ExecuteQuery("execute CongNo");
            grvCongNo.DataSource = result;
        }

        // Re-filters the grid on every keystroke; an empty box restores
        // the full list.
        private void txtTimKiemCongNo_TextChanged(object sender, EventArgs e)
        {
            string searchText = txtTimKiemCongNo.Text;
            if (searchText == "")
            {
                LoadCongNo();
                return;
            }
            DataTable result = DataProvider.Instance.ExecuteQuery("execute TimCongNo @cmt", new object[] { searchText });
            grvCongNo.DataSource = result;
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
    /// <summary>
    /// DTO for a row of the TAIKHOAN (account) table: account name,
    /// password and the owning employee / role ids.
    /// </summary>
    public class EC_TAIKHOAN
    {
        // Auto-implemented properties replace the hand-written backing
        // fields; the public surface (names and types) is unchanged.
        public string TenTaiKhoan { get; set; }
        public string MatKhau { get; set; }
        public string MaChucVu { get; set; }
        public string MaNhanVien { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Customer administration form: list, add, edit and delete customers.
    /// The radio buttons swap the underlying SQL at runtime to demonstrate
    /// different transaction isolation levels (dirty read, repeatable read,
    /// phantom read, and their fixes).
    /// </summary>
    public partial class fThongTinKhach : Form
    {
        EC_KHACHHANG ec = new EC_KHACHHANG();

        // Current insert/select statements; replaced by the isolation-level
        // radio-button handlers below.
        string insert = "execute ThemKhachHangVaoDanhSach @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU";
        string select = "select * from KHACHHANG";

        public fThongTinKhach()
        {
            InitializeComponent();
        }

        /// <summary>Reloads the customer grid with the current select statement.</summary>
        public void hienthi()
        {
            try
            {
                daKH.DataSource = KhachHangDAO_thinh.Instances.Taobang(select);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Thông báo", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                return;
            }
        }

        /// <summary>Clears every editing control.</summary>
        public void setnull()
        {
            txtMaKH.Text = "";
            txtTenkh.Text = "";
            txtDiachi.Text = "";
            txtSDT.Text = "";
            cbGioitinh.Text = "";
            txtCMT.Text = "";
        }

        // Copies the selected grid row into the editing controls
        // (previously duplicated in the two grid click handlers).
        private void HienThiHangDaChon()
        {
            txtMaKH.Text = daKH.SelectedRows[0].Cells[0].Value.ToString();
            txtTenkh.Text = daKH.SelectedRows[0].Cells[1].Value.ToString();
            dtNgaysinh.Text = daKH.SelectedRows[0].Cells[3].Value.ToString();
            cbGioitinh.Text = daKH.SelectedRows[0].Cells[2].Value.ToString();
            txtSDT.Text = daKH.SelectedRows[0].Cells[5].Value.ToString();
            txtDiachi.Text = daKH.SelectedRows[0].Cells[4].Value.ToString();
            txtCMT.Text = daKH.SelectedRows[0].Cells[6].Value.ToString();
        }

        // Rejects non-digit, non-control characters (phone / id-card boxes).
        private static bool KhongPhaiChuSo(char c)
        {
            return !Char.IsDigit(c) && !Char.IsControl(c);
        }

        // Rejects everything except backspace, separators, letters and digits.
        private static bool KyTuKhongHopLe(char c)
        {
            return c != (char)Keys.Back && !char.IsSeparator(c) && !char.IsLetter(c) && !char.IsDigit(c);
        }

        /// <summary>
        /// Adds a new customer after verifying the identity-card number is
        /// not already in the grid and a name has been entered.
        /// </summary>
        private void btnThem_Click(object sender, EventArgs e)
        {
            txtMaKH.Text = "";
            // Identity-card numbers already shown; the trailing empty
            // "new row" of the grid is skipped, hence Count - 1.
            // (An unused `int rows` local was removed here.)
            List<string> MyList = new List<string>();
            for (int i = 0; i < daKH.Rows.Count - 1; i++)
            {
                MyList.Add(daKH.Rows[i].Cells[6].Value.ToString());
            }
            if (MyList.Any(item => item == txtCMT.Text))
            {
                MessageBox.Show("Số chứng minh thư đã tồn tại, mời nhập lại!!!");
                return;
            }
            else if (txtTenkh.Text == "")
            {
                MessageBox.Show("Nhập đầy đủ thông tin!!!");
                return;
            }
            else
            {
                try
                {
                    ec.Ngaysinh = dtNgaysinh.Value.Date;
                    ec.SDT = txtSDT.Text;
                    ec.TenKH = txtTenkh.Text;
                    ec.GioiTinh = cbGioitinh.Text;
                    ec.DiaChi = txtDiachi.Text;
                    ec.SOCMT = txtCMT.Text;
                    KhachHangDAO_thinh.Instances.ThemKhachHangVaoDanhSach(ec, insert);
                    MessageBox.Show("Thực hiện thành công!!!");
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Thông báo", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    return;
                }
            }
            setnull();
            hienthi();
        }

        private void btnXoa_Click(object sender, EventArgs e)
        {
            Xoa();
        }

        /// <summary>
        /// Deletes the selected customer. The two radio buttons choose
        /// between the plain delete and the guarded delete that first
        /// re-checks the row still exists (lost-update demo).
        /// </summary>
        private void Xoa()
        {
            if (txtMaKH.Text == "")
            {
                // Fixed message: this form manages customers, not employees
                // (was "Chọn mã nhân viên!!!").
                MessageBox.Show("Chọn mã khách hàng!!!");
                return;
            }
            else
            {
                try
                {
                    if (rbtnMatdlcn.Checked)
                    {
                        ec.MaKH = txtMaKH.Text;
                        KhachHangDAO_thinh.Instances.XoaKhachHang(ec);
                        MessageBox.Show("Thực hiện thành công!!!");
                    }
                    if (rbtnFixMatdl.Checked)
                    {
                        if (DAO.KhachHangDAO_huy.Instances.TimKHTheoMAKH(txtMaKH.Text) == null)
                        {
                            MessageBox.Show("khách hàng này đã được xóa bởi user khác!");
                        }
                        else
                        {
                            ec.MaKH = txtMaKH.Text;
                            KhachHangDAO_thinh.Instances.XoaKhachHang(ec);
                            MessageBox.Show("Thực hiện thành công!!!");
                        }
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Thông báo", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    return;
                }
            }
            setnull();
            hienthi();
        }

        /// <summary>Saves the edits for the customer selected in the grid.</summary>
        private void btnSua_Click(object sender, EventArgs e)
        {
            if (txtMaKH.Text == "")
            {
                MessageBox.Show("Chọn hàng cần sửa!!!");
                return;
            }
            else
            {
                try
                {
                    ec.Ngaysinh = dtNgaysinh.Value.Date;
                    ec.MaKH = txtMaKH.Text;
                    ec.SDT = txtSDT.Text;
                    ec.TenKH = txtTenkh.Text;
                    ec.GioiTinh = cbGioitinh.Text;
                    ec.DiaChi = txtDiachi.Text;
                    ec.SOCMT = txtCMT.Text;
                    KhachHangDAO_thinh.Instances.SuaKhachHangVaoDanhSach(ec);
                    MessageBox.Show("Thực hiện thành công!!!");
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Thông báo", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    return;
                }
            }
            hienthi();
            setnull();
        }

        private void txtSDT_KeyPress(object sender, KeyPressEventArgs e)
        {
            if (KhongPhaiChuSo(e.KeyChar))
                e.Handled = true;
        }

        private void txtCMT_KeyPress(object sender, KeyPressEventArgs e)
        {
            if (KhongPhaiChuSo(e.KeyChar))
                e.Handled = true;
        }

        private void daKH_RowHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e)
        {
            HienThiHangDaChon();
        }

        public void fThongTinKhach_Load(object sender, EventArgs e)
        {
            hienthi();
            // The customer id is generated by the database; never hand-edited.
            txtMaKH.Enabled = false;
        }

        private void txtTenkh_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = KyTuKhongHopLe(e.KeyChar);
        }

        private void cbGioitinh_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = KyTuKhongHopLe(e.KeyChar);
        }

        private void txtDiachi_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = KyTuKhongHopLe(e.KeyChar);
        }

        private void daKH_CellClick(object sender, DataGridViewCellEventArgs e)
        {
            HienThiHangDaChon();
        }

        // --- Transaction-isolation demo switches --------------------------

        // Dirty read: read uncommitted data.
        private void rbDocDuLieuRac_CheckedChanged(object sender, EventArgs e)
        {
            insert = "execute ThemKhachHang_thinh @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU";
            select = "set tran isolation level read uncommitted select * from KHACHHANG";
        }

        // Read committed (fixes the dirty read).
        private void radioButton1_CheckedChanged(object sender, EventArgs e)
        {
            select = "set tran isolation level read committed select * from KHACHHANG";
        }

        private void btnXem_Click(object sender, EventArgs e)
        {
            hienthi();
        }

        private void rbFix_Repeatable_CheckedChanged(object sender, EventArgs e)
        {
            select = "execute hienthikhachhang_Fix_Repeatable";
        }

        private void rbRepeatable_read_CheckedChanged(object sender, EventArgs e)
        {
            select = "execute hienthikhachhang_Repeatable";
        }

        private void rb_Bong_ma_CheckedChanged(object sender, EventArgs e)
        {
            insert = "execute ThemKhachHangVaoDanhSach @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU";
            select = "execute hienthikhachhang_bongma";
        }

        private void rFix_bong_ma_CheckedChanged(object sender, EventArgs e)
        {
            insert = "execute ThemKhachHangVaoDanhSach @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU";
            select = "execute hienthikhachhang_fix_bongma";
        }
    }
}
<file_sep>using QuanLiKhachSan.DTO;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
    /// <summary>
    /// Data-access singleton for services (DICHVU) and service orders
    /// (DatDichVu).
    /// </summary>
    public class DichVuDAO
    {
        private static DichVuDAO instances;

        /// <summary>Lazily created shared instance.</summary>
        public static DichVuDAO Instances
        {
            get
            {
                if (instances == null) instances = new DichVuDAO();
                return instances;
            }
            set => instances = value;
        }

        public DichVuDAO() { }

        // Maps a DatDichVu result table to DTOs (was duplicated in the two
        // LoadDanhSachDatDichVu overloads).
        private static List<DatDichVu> DanhSachDatDichVuTuBang(DataTable dataTable)
        {
            List<DatDichVu> danhsachdichvu = new List<DatDichVu>();
            foreach (DataRow item in dataTable.Rows)
            {
                danhsachdichvu.Add(new DatDichVu(item));
            }
            return danhsachdichvu;
        }

        /// <summary>Returns every service offered by the hotel.</summary>
        public List<DichVu> LoadDanhSachDichVu()
        {
            DataTable dataTable = DataProvider.Instance.ExecuteQuery("select * from DichVu");
            List<DichVu> danhsachdichvu = new List<DichVu>();
            foreach (DataRow item in dataTable.Rows)
            {
                danhsachdichvu.Add(new DichVu(item));
            }
            return danhsachdichvu;
        }

        /// <summary>Returns the services ordered for one booking.</summary>
        public List<DatDichVu> LoadDanhSachDatDichVu(string madatphong)
        {
            DataTable dataTable = DataProvider.Instance.ExecuteQuery("execute DanhSachDatDichVu @madatphong", new object[] { madatphong });
            return DanhSachDatDichVuTuBang(dataTable);
        }

        /// <summary>Returns all service orders.</summary>
        public List<DatDichVu> LoadDanhSachDatDichVu()
        {
            DataTable dataTable = DataProvider.Instance.ExecuteQuery("select * from DatDichVu");
            return DanhSachDatDichVuTuBang(dataTable);
        }

        /// <summary>Inserts a new service-order row.</summary>
        public void ThemDatDichVu(string madp, string madv, int soLuong, DateTime ngayDung, float giadichvu)
        {
            string query = "execute ThemDatDichVu @madp , @madv , @SoLuong , @ngayDung , @giadichvuhientai";
            DataProvider.Instance.ExecuteQuery(query, new object[] { madp, madv, soLuong, ngayDung, giadichvu });
        }

        /// <summary>Adds to the quantity of an existing service-order row.</summary>
        public void ThemDatDichVuSoLuong(string madp, string madv, int soLuong, DateTime ngayDung, float giadichvu)
        {
            string query = "execute ThemSoLuongDatDichVu @madp , @madv , @SoLuong , @ngayDung , @giadichvuhientai ";
            DataProvider.Instance.ExecuteQuery(query, new object[] { madp, madv, soLuong, ngayDung, giadichvu });
        }

        /// <summary>Overwrites the quantity of an existing service-order row.</summary>
        public void SuaDatDichVuSoLuong(string madp, string madv, int soLuong, DateTime ngayDung, float giadichvu)
        {
            string query = "execute SuaSoLuongDatDichVu @madp , @madv , @SoLuong , @ngayDung , @giadichvuhientai ";
            DataProvider.Instance.ExecuteQuery(query, new object[] { madp, madv, soLuong, ngayDung, giadichvu });
        }

        /// <summary>Removes quantity from (or deletes) a service-order row.</summary>
        public void XoaDatDichVuSoLuong(string madp, string madv, int soLuong, DateTime ngayDung)
        {
            string query = "execute XoaDatDichVuSoLuong @madp , @madv , @SoLuong , @ngayDung";
            DataProvider.Instance.ExecuteQuery(query, new object[] { madp, madv, soLuong, ngayDung });
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan.DAO
{
    // Static data-access helpers around invoices (CTHD = chi tiết hóa đơn,
    // invoice detail), bookings, and customer/employee lookups.
    //
    // NOTE(review): every query below builds its SQL by string
    // concatenation. The values come from the UI/database ids, but this is
    // still open to SQL injection — consider switching to the parameterized
    // DataProvider.ExecuteQuery(query, object[]) overload used elsewhere in
    // this project.
    public class CTHD
    {
        // Booking rows for a booking id.
        public static DataTable TimMDP(string maDP)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.TimMDP_madp @madp='"+maDP+"'");
        }
        // Booking rows for a room id.
        public static DataTable TimMDP_tuMAPH(string maPH)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.TimMDP_tuMAPH @maPH='"+maPH+"'");
        }
        // Unpaid future bookings of a room (check-in date after today).
        public static DataTable TimMDP_tuMAPH_tuonglai(string maPH)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("select * from DatPhong where MAPHONG='"+maPH+"' and TrangThaiThanhToan=0 and NGAYO>GETDATE()");
        }
        // Invoice-detail rows for an invoice id.
        public static DataTable TimCTHD_tuMAHD(string maHD)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.TimCTHD_tuMAHD @maHD='"+maHD+"'");
        }
        // Invoice-detail listing for an invoice id.
        public static DataTable DSCTHD_FromMHD(string maHD)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.DSCTHD_FromMHD @maHD='"+maHD+"'");
        }
        // Booking id belonging to an invoice (first row's MADATPHONG).
        public static string TimMDP_tuMAHD(string mahd)
        {
            DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.TimMDP_tuMAHD @mahd='"+mahd+"'");
            return dt.Rows[0]["MADATPHONG"].ToString();
        }
        // Updates the room's state; returns the affected row count.
        public static int UpdatePhong(string maphong)
        {
            return DAO.DataProvider.Instance.ExecuteNonQuery("exec dbo.UpdatePhong_bay @maph='"+maphong+"'");
        }
        // Room information and room charges.
        public static DataTable ttTienPhong_tuMADP(string maDP)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.ttTienPhong_tuMADP @madp='"+maDP+"'");
        }
        public static DataTable ttTienPhong(string maPH)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.ttTienPhong @maph='"+maPH+"'");
        }
        // Service information and service charges (unpaid bookings of a room).
        public static DataTable ttTienDV(string maPH)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("select TENDV [Dịch Vụ],SoLuong [Số Lượng],ngayDung [Ngày Sử Dụng], giadichvuhientai [Giá], (giadichvuhientai*SoLuong) [Thành Tiền] from DatDichVu ddv join DICHVU dv on dv.MADV = ddv.madv join DatPhong dp on dp.MADATPHONG = ddv.madp where MAPHONG = '"+maPH+"' and TrangThaiThanhToan = 0");
        }
        public static DataTable ttTienDV_tuMADP(string maDP)
        {
            return DAO.DataProvider.Instance.ExecuteQuery("select TENDV [Dịch Vụ],SoLuong [Số Lượng],ngayDung [Ngày Sử Dụng], giadichvuhientai [Giá], (giadichvuhientai*SoLuong) [Thành Tiền] from DatDichVu ddv join DICHVU dv on dv.MADV = ddv.madv where madp='"+maDP+"'");
        }
        // Customer lookup by customer id.
        public static KHACHHANG TimKH(string makh)
        {
            DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.TimKH @id='"+makh+"'");
            KHACHHANG kh = new KHACHHANG(dt.Rows[0]);
            return kh;
        }
        // Customer lookup by invoice id.
        public static KHACHHANG TimKH_tuMAHD(string maHD)
        {
            DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.TimKH_tuMAHD @mahd='"+maHD+"'");
            KHACHHANG kh = new KHACHHANG(dt.Rows[0]);
            return kh;
        }
        // Customer lookup by room id.
        public static KHACHHANG TimKH_tuMAPHONG(string maPhong)
        {
            DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.TimKH_tuMAPHONG @maph='"+maPhong+"'");
            KHACHHANG kh = new KHACHHANG(dt.Rows[0]);
            return kh;
        }
        // Employee record of the currently logged-in account.
        public static NHANVIEN TimNV()
        {
            DataTable dt = DataProvider.Instance.ExecuteQuery("select * from TAIKHOAN where TENTAIKHOAN='"+fLogin.tentk+"'");
            NHANVIEN nv = new NHANVIEN(dt.Rows[0]);
            return nv;
        }
        // Inserts an invoice-detail row; returns the affected row count.
        public static int ThemCTHD(string mahd,string madp,string ghichu)
        {
            return DAO.DataProvider.Instance.ExecuteNonQuery("exec dbo.ThemCTHD @mahd='"+mahd+"',@madp='"+madp+"',@ghichu='"+ghichu+"'");
        }
        // Marks the booking as paid / updates its checkout date.
        public static int UpdateDP(string madp)
        {
            return DataProvider.Instance.ExecuteNonQuery("exec dbo.UpdateDP @madp='"+madp+"'");
        }
        // Deletes the invoice details of an invoice.
        public static int DeleteCTHD(string mahd)
        {
            return DataProvider.Instance.ExecuteNonQuery("exec dbo.DeleteCTHD @mahd='"+mahd+"'");
        }
        // Deletes a booking.
        public static int DeleteDatPhong(string madatphong)
        {
            return DataProvider.Instance.ExecuteNonQuery("exec dbo.DeleteDatPhong @madp='"+madatphong+"'");
        }
        // Deletes the service orders of a booking.
        public static int DeleteDatDV(string maDP)
        {
            return DataProvider.Instance.ExecuteNonQuery("exec dbo.DeleteDatDV @madp='"+maDP+"'");
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
namespace QuanLiKhachSan
{
    /// <summary>Form displaying the logged-in employee's work schedule.</summary>
    public partial class fLichLamViec : Form
    {
        public fLichLamViec()
        {
            InitializeComponent();
        }

        /// <summary>Reloads the schedule grid for the current account.</summary>
        public void hienthi()
        {
            var lich = LichLamViecDAO.Instances.Taobang2(fLogin.TaiKhoan);
            daLichlamviec.DataSource = lich;
        }

        // Wired to the panel's Paint event; refreshes the grid on repaint.
        // NOTE(review): this queries the database on every repaint — the
        // form's Load event would be a cheaper trigger.
        private void panel1_Paint(object sender, PaintEventArgs e)
        {
            hienthi();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
public partial class fDatDichVu : Form
{
        /// <summary>Service-ordering form (designer-initialized).</summary>
        public fDatDichVu()
        {
            InitializeComponent();
        }
private void Loaddulieuvaocbb()
{
List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhongChuaThanhToan();
cbbMaDP.DataSource = danhsachdatphong;
cbbMaDP.DisplayMember = "MaDatPhong";
List<DichVu> danhsachdichvu = DichVuDAO.Instances.LoadDanhSachDichVu();
cbbMaDV.DataSource = danhsachdichvu;
cbbMaDV.DisplayMember = "TenDV";
}
private void LoadDanhSachDatDichVu()
{
if (cbbMaDP.SelectedItem == null) return;
string madatphong = (cbbMaDP.SelectedItem as DatPhong).MaDatPhong;
List<DatDichVu> danhsachdatdichvu = DichVuDAO.Instances.LoadDanhSachDatDichVu(madatphong);
grVDatDichVu.DataSource = danhsachdatdichvu;
}
private void LoadDanhSachDatPhong()
{
List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
grVDatPhong.DataSource = danhsachdatphong;
}
        /// <summary>
        /// Orders a service for the selected booking: the usage date must
        /// fall inside the stay and the quantity must be positive; if the
        /// same booking/service/day line already exists its quantity is
        /// increased, otherwise a new line is inserted.
        /// </summary>
        private void btnThem_Click(object sender, EventArgs e)
        {
            if (cbbMaDP.SelectedItem == null)
            {
                MessageBox.Show("Chưa Có Ai Đặt Phòng");
                return;
            }
            List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
            string madatphong = (cbbMaDP.SelectedItem as DatPhong).MaDatPhong;
            List<DatDichVu> danhsachdatdichvu = DichVuDAO.Instances.LoadDanhSachDatDichVu(madatphong);
            string madv = (cbbMaDV.SelectedItem as DichVu).MaDV;
            string tendv = (cbbMaDV.SelectedItem as DichVu).TenDV;
            DateTime ngayDung = dTPNgayDung.Value.Date;
            // The chosen date must lie inside the stay of the selected booking.
            foreach (DatPhong item in danhsachdatphong)
            {
                if (madatphong == item.MaDatPhong)
                {
                    if (ngayDung < item.NgayO || ngayDung > item.NgayDi)
                    {
                        MessageBox.Show("Ngày Này Phòng Này Chưa Có Người Ở Hoặc Đã Đi Rồi");
                        return;
                    }
                }
            }
            int soLuong = (int)numericUpDown1.Value;
            if (soLuong <= 0)
            {
                MessageBox.Show("Số Lượng Phải Lớn Hơn 0");return;
            }
            // Same booking + service + day already ordered: add to the quantity.
            foreach (DatDichVu item in danhsachdatdichvu)
            {
                if (item.MaDatPhong == madatphong && item.TenDV == tendv && item.NgayDung.Date == ngayDung.Date)
                {
                    DichVuDAO.Instances.ThemDatDichVuSoLuong(madatphong, madv, soLuong, ngayDung,(cbbMaDV.SelectedItem as DichVu).GiaDV);
                    LoadDanhSachDatPhong();
                    LoadDanhSachDatDichVu();
                    return;
                }
            }
            // No matching line: insert a fresh order at the current price.
            DichVuDAO.Instances.ThemDatDichVu(madatphong, madv, soLuong, ngayDung, (cbbMaDV.SelectedItem as DichVu).GiaDV);
            LoadDanhSachDatPhong();
            LoadDanhSachDatDichVu();
        }
        /// <summary>
        /// Overwrites the quantity of an existing order line; the booking,
        /// service and usage date must exactly match an existing line.
        /// </summary>
        private void btnSua_Click(object sender, EventArgs e)
        {
            if (cbbMaDP.SelectedItem == null)
            {
                MessageBox.Show("Chưa Có Ai Đặt Phòng");
                return;
            }
            // NOTE(review): danhsachdatphong is loaded but never used in
            // this handler — candidate for removal.
            List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
            string madatphong = (cbbMaDP.SelectedItem as DatPhong).MaDatPhong;
            string madv = (cbbMaDV.SelectedItem as DichVu).MaDV;
            string tendv = (cbbMaDV.SelectedItem as DichVu).TenDV;
            List<DatDichVu> danhsachdatdichvu = DichVuDAO.Instances.LoadDanhSachDatDichVu(madatphong);
            DateTime ngayDung = dTPNgayDung.Value.Date;
            int soLuong = (int)numericUpDown1.Value;
            if (soLuong <= 0)
            {
                MessageBox.Show("Số Lượng Phải Lớn Hơn 0"); return;
            }
            foreach (DatDichVu item in danhsachdatdichvu)
            {
                if (item.MaDatPhong == madatphong && item.TenDV == tendv && item.NgayDung.Date == ngayDung.Date)
                {
                    DichVuDAO.Instances.SuaDatDichVuSoLuong(madatphong, madv, soLuong, ngayDung,(cbbMaDV.SelectedItem as DichVu).GiaDV);
                    LoadDanhSachDatPhong();
                    LoadDanhSachDatDichVu();
                    return;
                }
            }
            MessageBox.Show("Bạn Phải Chọn Đúng Mã Đặt, Mã Dịch Vụ, Và Ngày Dùng Để Sửa Số Lượng");
            return;
        }
private void btnXoa_Click(object sender, EventArgs e)
{
    // Deletes a service booking matching the selected (reservation, service,
    // usage-date) triple.
    if (cbbMaDP.SelectedItem == null)
    {
        MessageBox.Show("Chưa Có Ai Đặt Phòng");
        return;
    }
    // Fix: guard against an empty service combo box; the original threw a
    // NullReferenceException when cbbMaDV had no selection.
    if (cbbMaDV.SelectedItem == null)
    {
        MessageBox.Show("Chưa Chọn Dịch Vụ");
        return;
    }
    List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
    string madatphong = (cbbMaDP.SelectedItem as DatPhong).MaDatPhong;
    List<DatDichVu> danhsachdatdichvu = DichVuDAO.Instances.LoadDanhSachDatDichVu(madatphong);
    string madv = (cbbMaDV.SelectedItem as DichVu).MaDV;
    string tendv = (cbbMaDV.SelectedItem as DichVu).TenDV;
    DateTime ngayDung = dTPNgayDung.Value.Date;
    int soLuong = (int)numericUpDown1.Value;
    foreach (DatDichVu item in danhsachdatdichvu)
    {
        if (item.MaDatPhong == madatphong && item.TenDV == tendv && item.NgayDung.Date == ngayDung.Date)
        {
            DichVuDAO.Instances.XoaDatDichVuSoLuong(madatphong, madv, soLuong, ngayDung);
            LoadDanhSachDatPhong();
            LoadDanhSachDatDichVu();
            return;
        }
    }
    MessageBox.Show("Bạn Phải Chọn Đúng Mã dp, madv, ngày dùng");
    return;
}
private void cbbMaDP_SelectedIndexChanged(object sender, EventArgs e)
{
    // Shows the stay window of the newly selected reservation and refreshes
    // the booked-services grid.
    // Fix: SelectedItem is null while the combo box is being rebound/cleared;
    // the original dereferenced it unconditionally.
    DatPhong datphong = cbbMaDP.SelectedItem as DatPhong;
    if (datphong == null) return;
    txtNgayDi.Clear();
    txtNgayO.Clear();
    txtNgayO.Text = datphong.NgayO.Date.ToString("dd/MM/yyyy");
    // Fix: NgayDi is a nullable DateTime ("still staying"); calling .Value on
    // a null date threw InvalidOperationException.
    if (datphong.NgayDi.HasValue)
        txtNgayDi.Text = datphong.NgayDi.Value.Date.ToString("dd/MM/yyyy");
    LoadDanhSachDatDichVu();
}
private void grVDatDichVu_CellMouseClick(object sender, DataGridViewCellMouseEventArgs e)
{
    // Re-binds the editor controls to the grid's data source so the values of
    // the clicked row appear in the input fields.
    object nguon = grVDatDichVu.DataSource;

    cbbMaDP.DataBindings.Clear();
    cbbMaDP.DataBindings.Add("Text", nguon, "MaDatPhong");

    cbbMaDV.DataBindings.Clear();
    cbbMaDV.DataBindings.Add("Text", nguon, "TenDv");

    numericUpDown1.DataBindings.Clear();
    numericUpDown1.DataBindings.Add("Text", nguon, "SoLuong");

    dTPNgayDung.DataBindings.Clear();
    dTPNgayDung.DataBindings.Add("Text", nguon, "ngayDung");
}
// Form load: fills the combo boxes and both grids with their initial data.
public void fDatDichVu_Load(object sender, EventArgs e)
{
    Loaddulieuvaocbb();          // populate the reservation/service combos
    LoadDanhSachDatPhong();      // reservation grid
    LoadDanhSachDatDichVu();     // booked-services grid
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Row of the room-status statistics report: a status code and the number of
/// rooms currently in that status.
/// </summary>
public class TableHienThiThongKeTinhTrangPhong
{
    private object item;

    public int TinhTrangPhong { get; private set; }
    public int SoLuong { get; private set; }

    public TableHienThiThongKeTinhTrangPhong(int tinhtrangphong, int soluong)
    {
        TinhTrangPhong = tinhtrangphong;
        SoLuong = soluong;
    }

    // Builds a row straight from the "TINHTRANGPHONG"/"So Luong" columns of a
    // statistics query result.
    public TableHienThiThongKeTinhTrangPhong(DataRow row)
        : this((int)row["TINHTRANGPHONG"], (int)row["So Luong"])
    {
    }

    public TableHienThiThongKeTinhTrangPhong(object item)
    {
        this.item = item;
    }
}
}
<file_sep>using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
using QuanLiKhachSan.Properties;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Drawing.Printing;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace QuanLiKhachSan
{
/// <summary>
/// Invoice form. Works in three modes depending on which of funData / LISTMAHD
/// / MaPH is set before the form loads:
///  - LISTMAHD set: report over a batch of already-created invoices (print only)
///  - funData set:  view/print one existing invoice
///  - neither set:  checkout by room id MaPH (pay button enabled)
/// </summary>
public partial class fBaoCaoHoaDon : Form
{
    public static fBaoCaoHoaDon instances;
    private int a = 0;              // amount paid in advance (TRATRUOC), summed over all bookings shown
    private int tienphong;          // accumulated room charge
    private int tiendv;             // accumulated service charge
    private string maHD;            // invoice id handed over by the opening form
    private string maPH;            // room id (checkout mode)
    private List<string> lISTMAHD;  // batch of invoice ids (report mode)

    // Invoice id handed over by the opening form ("fun(nel) data").
    public string funData
    {
        get { return maHD; }
        set { maHD = value; }
    }

    // Lazily-created singleton accessor.
    public static fBaoCaoHoaDon Instances
    {
        get
        {
            if (instances == null) instances = new fBaoCaoHoaDon(); return instances;
        }
        set => instances = value;
    }

    public string MaPH { get => maPH; set => maPH = value; }
    public List<string> LISTMAHD { get => lISTMAHD; set => lISTMAHD = value; }

    // Toggles between the "pay" and "print" buttons: a == true means the
    // invoice already exists, so only printing is allowed.
    public void EnableInhoadon(bool a)
    {
        btnThanhToan.Enabled = !a;
        btnInhoadon.Enabled = a;
    }

    public fBaoCaoHoaDon()
    {
        InitializeComponent();
        dtpkNgayThanhToan.Value = DateTime.Now;
        EnableInhoadon(false);
    }

    // Loads customer info, room charges, service charges and the pre-payment
    // according to which mode (see class summary) the form was opened in.
    private void fBaoCaoHoaDon_Load(object sender, EventArgs e)
    {
        KHACHHANG kh = null;
        if (LISTMAHD != null)
        {
            // Batch mode: merge the charge tables of every invoice in the list.
            kh = CTHD.TimKH_tuMAHD(LISTMAHD[0]);
            DataTable tp = new DataTable();
            DataTable tdv = new DataTable();
            foreach (var item in LISTMAHD)
            {
                MaPH = CTHD.TimMDP(CTHD.TimCTHD_tuMAHD(item).Rows[0]["MADATPHONG"].ToString()).Rows[0]["MAPHONG"].ToString();
                tp.Merge(CTHD.ttTienPhong_tuMADP(CTHD.TimMDP_tuMAHD(item)));
                tdv.Merge(CTHD.ttTienDV_tuMADP(CTHD.TimMDP_tuMAHD(item)));
                a += int.Parse(CTHD.TimMDP(CTHD.TimCTHD_tuMAHD(item).Rows[0]["MADATPHONG"].ToString()).Rows[0]["TRATRUOC"].ToString());
            }
            grvTienPhong.DataSource = tp;
            grvTTDV.DataSource = tdv;
            EnableInhoadon(true);
        }
        if (funData != null)
        {
            // Single-invoice mode.
            lblMahd.Text = funData;
            kh = CTHD.TimKH_tuMAHD(funData);
            MaPH = CTHD.TimMDP(CTHD.TimCTHD_tuMAHD(funData).Rows[0]["MADATPHONG"].ToString()).Rows[0]["MAPHONG"].ToString();
            grvTienPhong.DataSource = CTHD.ttTienPhong_tuMADP(CTHD.TimMDP_tuMAHD(funData));
            grvTTDV.DataSource = CTHD.ttTienDV_tuMADP(CTHD.TimMDP_tuMAHD(funData));
            a = int.Parse(CTHD.TimMDP(CTHD.TimCTHD_tuMAHD(funData).Rows[0]["MADATPHONG"].ToString()).Rows[0]["TRATRUOC"].ToString());
            EnableInhoadon(true);
        }
        if (funData == null && LISTMAHD == null)
        {
            // Checkout mode: charges are looked up by room id.
            kh = CTHD.TimKH_tuMAPHONG(MaPH);
            grvTienPhong.DataSource = CTHD.ttTienPhong(MaPH);
            grvTTDV.DataSource = CTHD.ttTienDV(MaPH);
            a = int.Parse((CTHD.TimMDP_tuMAPH(MaPH).Rows[0]["TRATRUOC"].ToString()));
        }
        LoadTTKH(kh);
        LoadTTTien();
    }

    // Vietnamese culture is used for thousand-separator money formatting.
    CultureInfo cul = CultureInfo.GetCultureInfo("vi-VN");

    // Sums the room grid (column 5) and the service grid (column 4) and fills
    // the money labels: room, service, total, pre-paid and remainder.
    private void LoadTTTien()
    {
        tiendv = 0;
        tienphong = 0;
        foreach (DataGridViewRow item in grvTienPhong.Rows)
        {
            tienphong += Convert.ToInt32(item.Cells[5].Value);
        }
        foreach (DataGridViewRow item in grvTTDV.Rows)
        {
            tiendv += Convert.ToInt32(item.Cells[4].Value);
        }
        lblTienPhong.Text = tienphong.ToString("#,###", cul.NumberFormat) + "(VNĐ)";
        if (tiendv != 0) lblTienDV.Text = tiendv.ToString("#,###", cul.NumberFormat) + "(VNĐ)"; else lblTienDV.Text = "0.000(VNĐ)";
        lblTongTien.Text = (tienphong + tiendv).ToString("#,###", cul.NumberFormat) + "(VNĐ)";
        if (a != 0) lblTratruoc.Text = a.ToString("#,###", cul.NumberFormat) + "(VNĐ)"; else lblTratruoc.Text = "0.000(VNĐ)";
        lblConlai.Text = ((tienphong + tiendv) - a).ToString("#,###", cul.NumberFormat) + "(VNĐ)";
    }

    // Fills the customer-information labels.
    private void LoadTTKH(KHACHHANG kh)
    {
        lblMaKH.Text = kh.MAKH;
        lblHoTen.Text = kh.TENKH;
        lblGioiTinh.Text = kh.GIOITINH.ToString();
        lblNgaySinh.Text = kh.NGAYSINH.ToString("dd/MM/yyyy");
        lblCMND.Text = kh.CMND;
        lblDiaChi.Text = kh.DIACHI;
        lblSDT.Text = kh.SODIENTHOAI;
    }

    // Pay: creates the invoice + its detail row, then prints and closes.
    private void btnThanhToan_Click(object sender, EventArgs e)
    {
        HOADON.ThemHD(dtpkNgayThanhToan.Value.ToString("yyyy-MM-dd"), lblMaKH.Text, CTHD.TimNV().manv, tienphong.ToString(), tiendv.ToString());
        CTHD.ThemCTHD(HOADON.TimMAHDVuaTao(), CTHD.TimMDP_tuMAPH(MaPH).Rows[0]["MADATPHONG"].ToString(), " ko có gì");
        // CTHD.UpdateDP(CTHD.TimMDP_tuMAPH(MaPH).Rows[0]["MADATPHONG"].ToString());
        // CTHD.UpdatePhong(MaPH);
        lblMahd.Text = HOADON.TimMAHDVuaTao();
        InHoaDon();
        MessageBox.Show("đã thanh toán thành công");
        this.Close();
    }

    // Shows the maximized print-preview dialog for the invoice document.
    private void InHoaDon()
    {
        printPreviewDialog1.Document = printDocument1;
        // printPreviewDialog1.PrintPreviewControl.AutoZoom = false;
        (printPreviewDialog1 as Form).WindowState = FormWindowState.Maximized;
        printPreviewDialog1.ShowDialog();
        this.Close();
    }

    // Print button: preview/print an already-existing invoice.
    private void btnInhoadon_Click(object sender, EventArgs e)
    {
        InHoaDon();
        // this.Close();
    }

    // Renders the invoice page: header image, customer/invoice ids, the room
    // table, the service table, and the money summary. 'y' tracks the current
    // vertical drawing position.
    private void printDocument1_PrintPage(object sender, PrintPageEventArgs e)
    {
        Image img = Resources.hoadon;
        e.Graphics.DrawImage(img, 260, 0, img.Width, img.Height);
        e.Graphics.DrawString("Ngày: " + DateTime.Now.ToShortDateString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(20, img.Height - 30));
        e.Graphics.DrawString("Khách Hàng: " + lblHoTen.Text, new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(20, img.Height + 10));
        e.Graphics.DrawString("Hóa Đơn: " + lblMahd.Text, new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(650, img.Height + 10));
        e.Graphics.DrawString("---------------------------------------------------------------------------------------------------------------------------------------------------------------------"
            , new Font("Arial", 10, FontStyle.Regular), Brushes.Black, new Point(20, img.Height + 40));
        e.Graphics.DrawString("HÓA ĐƠN THANH TOÁN", new Font("Arial", 15, FontStyle.Bold), Brushes.OrangeRed, new Point(280, img.Height + 60));
        // Room-charge table header.
        e.Graphics.DrawString("STT", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(20, img.Height + 100));
        e.Graphics.DrawString("Phòng", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(90, img.Height + 100));
        e.Graphics.DrawString("Giá(VNĐ)", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(190, img.Height + 100));
        e.Graphics.DrawString("Ngày Ở", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(300, img.Height + 100));
        e.Graphics.DrawString("Ngày Đi", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(420, img.Height + 100));
        e.Graphics.DrawString("Số Ngày Ở", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(537, img.Height + 100));
        e.Graphics.DrawString("Thành Tiền(VNĐ)", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(690, img.Height + 100));
        e.Graphics.DrawString("----------------------------------------------------------------------------------------------------------------------------------------------------------------------"
            , new Font("Arial", 10, FontStyle.Regular), Brushes.Black, new Point(20, img.Height + 120));
        int y = img.Height + 150;
        // Room-charge rows, straight from the grid cells.
        for (int i = 0; i < grvTienPhong.Rows.Count; i++)
        {
            e.Graphics.DrawString((i + 1).ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(20, y));
            e.Graphics.DrawString(grvTienPhong.Rows[i].Cells[0].Value.ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(90, y));
            e.Graphics.DrawString((int.Parse(grvTienPhong.Rows[i].Cells[1].Value.ToString())).ToString("#,###", cul.NumberFormat), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(190, y));
            e.Graphics.DrawString(DateTime.Parse(grvTienPhong.Rows[i].Cells[2].Value.ToString()).ToShortDateString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(300, y));
            e.Graphics.DrawString(DateTime.Parse(grvTienPhong.Rows[i].Cells[3].Value.ToString()).ToShortDateString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(420, y));
            e.Graphics.DrawString(grvTienPhong.Rows[i].Cells[4].Value.ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(540, y));
            e.Graphics.DrawString(int.Parse(grvTienPhong.Rows[i].Cells[5].Value.ToString()).ToString("#,###", cul.NumberFormat), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(690, y));
            y += 30;
        }
        y += 40;
        // Service-charge table header.
        e.Graphics.DrawString("STT", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(20, y));
        e.Graphics.DrawString("Dịch Vụ", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(120, y));
        e.Graphics.DrawString("Số Lượng", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(290, y));
        e.Graphics.DrawString("Ngày sử dụng", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(410, y));
        e.Graphics.DrawString("giá(VNĐ)", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(542, y));
        e.Graphics.DrawString("Thành Tiền(VNĐ)", new Font("Arial", 12, FontStyle.Bold), Brushes.Black, new Point(690, y));
        e.Graphics.DrawString("-----------------------------------------------------------------------------------------------------------------------------------------------------------------------"
            , new Font("Arial", 10, FontStyle.Regular), Brushes.Black, new Point(20, y += 20)); // y advances as a side effect
        y += 30;
        // Service-charge rows.
        for (int i = 0; i < grvTTDV.Rows.Count; i++)
        {
            e.Graphics.DrawString((i + 1).ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(20, y));
            e.Graphics.DrawString(grvTTDV.Rows[i].Cells[0].Value.ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(120, y));
            e.Graphics.DrawString(grvTTDV.Rows[i].Cells[1].Value.ToString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(300, y));
            e.Graphics.DrawString(DateTime.Parse(grvTTDV.Rows[i].Cells[2].Value.ToString()).ToShortDateString(), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(415, y));
            e.Graphics.DrawString((int.Parse(grvTTDV.Rows[i].Cells[3].Value.ToString())).ToString("#,###", cul.NumberFormat), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(545, y));
            e.Graphics.DrawString(int.Parse(grvTTDV.Rows[i].Cells[4].Value.ToString()).ToString("#,###", cul.NumberFormat), new Font("Arial", 13, FontStyle.Regular), Brushes.Black, new Point(690, y));
            y += 30;
        }
        e.Graphics.DrawString("--------------------------------------------------------------------------------"
            , new Font("Arial", 10, FontStyle.Regular), Brushes.Black, new Point(400, y += 20)); // y advances as a side effect
        // Money summary (taken from the labels computed in LoadTTTien).
        e.Graphics.DrawString("Tiền Phòng: " + lblTienPhong.Text, new Font("Arial", 15, FontStyle.Regular), Brushes.Black, new Point(400, y += 40));
        e.Graphics.DrawString("Tiền Dịch Vụ: " + lblTienDV.Text, new Font("Arial", 15, FontStyle.Regular), Brushes.Black, new Point(400, y += 40));
        e.Graphics.DrawString("Tổng Tiền: " + lblTongTien.Text, new Font("Arial", 15, FontStyle.Regular), Brushes.Black, new Point(400, y += 40));
        e.Graphics.DrawString("Trả Trước: " + lblTratruoc.Text, new Font("Arial", 15, FontStyle.Regular), Brushes.Black, new Point(400, y += 40));
        e.Graphics.DrawString("Còn Lại: " + lblConlai.Text, new Font("Arial", 15, FontStyle.Regular), Brushes.Black, new Point(400, y += 40));
        e.Graphics.DrawString("Cảm ơn và hẹn gặp lại!!!", new Font("Arial", 14, FontStyle.Regular), Brushes.Orange, new Point(400, y += 35));
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan.DAO
{
/// <summary>
/// Data-access helper for customers: insert, update and lookup by id or by
/// identity-card number, all via stored procedures.
/// </summary>
public class KhachHangDAO_huy
{
    private static KhachHangDAO_huy instances;

    // Lazily-created singleton accessor.
    public static KhachHangDAO_huy Instances
    {
        get
        {
            if (instances == null) instances = new KhachHangDAO_huy();
            return instances;
        }
        set => instances = value;
    }

    public KhachHangDAO_huy() { }

    // Inserts a new customer row; always reports success.
    public bool ThemKhachHangVaoDanhSach(string tenKH, string gioiTinh, DateTime ngaySinh, string diaChi, int soDienThoai, string cmt)
    {
        DataProvider.Instance.ExecuteQuery(
            "execute ThemKhachHangVaoDanhSach @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU",
            new object[] { tenKH, gioiTinh, ngaySinh, diaChi, soDienThoai, cmt });
        return true;
    }

    // Updates an existing customer row identified by makh; always reports success.
    public bool SuaKhachHangVaoDanhSach(string makh, string tenKH, string gioiTinh, DateTime ngaySinh, string diaChi, int soDienThoai, string cmt)
    {
        DataProvider.Instance.ExecuteQuery(
            "execute SuaKhachHang @makh , @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU",
            new object[] { makh, tenKH, gioiTinh, ngaySinh, diaChi, soDienThoai, cmt });
        return true;
    }

    // Looks a customer up by customer id; null when no row matches.
    public KhachHang_huy TimKHTheoMAKH(string maKH)
    {
        DataTable ketQua = DataProvider.Instance.ExecuteQuery("execute TimKH @id", new object[] { maKH });
        return ketQua.Rows.Count == 0 ? null : new KhachHang_huy(ketQua.Rows[0]);
    }

    // Looks a customer up by identity-card number; null when no row matches.
    public KhachHang_huy TimKHTheoCMND(string CMNDKH)
    {
        DataTable ketQua = DataProvider.Instance.ExecuteQuery("execute TimKHTheoCMND @cmnd", new object[] { CMNDKH });
        return ketQua.Rows.Count == 0 ? null : new KhachHang_huy(ketQua.Rows[0]);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Entity for a staff position: its code and display name.
/// </summary>
public class EC_CHUCVU
{
    public string MaChucVu { get; set; }
    public string TenChucVu { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Entity for a hotel room: id, display name, status code, room-type id,
/// price and currency unit.
/// </summary>
public class Phong
{
    private object item;

    public string MA { get; set; }
    public string TenPhong { get; set; }
    public int TinhTrangPhong { get; set; }
    public string MaLoaiPhong { get; set; }
    public float GiaPhong { get; set; }
    public string DonViTienTe { get; set; }

    public Phong(string mA, string tenPhong, int tinhTrangPhong, string maLoaiPhong, float giaPhong, string donViTienTe)
    {
        MA = mA;
        TenPhong = tenPhong;
        TinhTrangPhong = tinhTrangPhong;
        MaLoaiPhong = maLoaiPhong;
        GiaPhong = giaPhong;
        DonViTienTe = donViTienTe;
    }

    // Builds a room from the columns of a PHONG query row.
    public Phong(DataRow row)
        : this(row["MAPHONG"].ToString(),
               row["tenphong"].ToString(),
               (int)row["tinhtrangphong"],
               row["maloaiphong"].ToString(),
               float.Parse(row["giaphong"].ToString()),
               row["donvitiente"].ToString())
    {
    }

    public Phong(object item)
    {
        this.item = item;
    }
}
}// de t them cho cai nao ko phai m di chuyen chuot//// chua xoa may cai kia ma<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
// Entity for a customer row (KHACHHANG table): id, name, gender, birth date,
// address, phone number and identity-card number.
public class KHACHHANG
{
    public string MAKH {get;set;}
    public string TENKH {get;set;}
    public string GIOITINH {get;set;}
    public DateTime NGAYSINH {get;set;}
    public string DIACHI {get;set;}
    public string SODIENTHOAI { get; set; }
    public string CMND { get; set; }
    // Older constructor kept for reference (GIOITINH used to be a bool):
    //public KHACHHANG(string makh,string tenkh, bool gioitinh, DateTime ngaysinh, string diachi, string sodienthoai)
    //    {
    //        this.MAKH = makh;
    //        this.TENKH = tenkh;
    //        this.GIOITINH = gioitinh;
    //        this.NGAYSINH = ngaysinh;
    //        this.DIACHI = diachi;
    //        this.SODIENTHOAI = sodienthoai;
    //    }
    // Builds a customer from the columns of a KHACHHANG query row.
    public KHACHHANG(DataRow row)
    {
        this.MAKH = row["MAKH"].ToString();
        this.TENKH = row["TENKH"].ToString();
        this.GIOITINH =row["GIOITINH"].ToString();
        this.NGAYSINH = (DateTime)row["NGAYSINH"];
        this.DIACHI = row["DIACHI"].ToString();
        this.SODIENTHOAI = row["SODIENTHOAI"].ToString();
        this.CMND = row["CHUNGMINHTHU"].ToString();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Alternative customer entity used by KhachHangDAO_huy. Differs from
/// KHACHHANG in that the phone number is an int.
/// </summary>
public class KhachHang_huy
{
    private object item;

    public string MaKH { get; set; }
    public string TenKH { get; set; }
    public string GioiTinh { get; set; }
    public DateTime NgaySinh { get; set; }
    public string DiaChi { get; set; }
    public int SoDienThoai { get; set; }
    public string ChungMinhThu { get; set; }

    public KhachHang_huy(string maKH, string tenKH, string gioiTinh, DateTime ngaySinh, string diaChi, int soDienThoai, string chungMinhThu)
    {
        MaKH = maKH;
        TenKH = tenKH;
        GioiTinh = gioiTinh;
        NgaySinh = ngaySinh;
        DiaChi = diaChi;
        SoDienThoai = soDienThoai;
        ChungMinhThu = chungMinhThu;
    }

    public KhachHang_huy() { }

    // Builds a customer from a query row; the birth date may be missing and
    // is only assigned when present.
    public KhachHang_huy(DataRow row)
    {
        MaKH = row["MaKH"].ToString();
        TenKH = row["TenKH"].ToString();
        GioiTinh = row["GioiTinh"].ToString();
        if (row["NgaySinh"].ToString() != "")
            NgaySinh = (DateTime)row["NgaySinh"];
        DiaChi = row["DiaChi"].ToString();
        SoDienThoai = (int)row["SODIENTHOAI"];
        ChungMinhThu = row["ChungMinhThu"].ToString();
    }

    public KhachHang_huy(object item)
    {
        this.item = item;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using QuanLiKhachSan.DTO;
using System.Data;
namespace QuanLiKhachSan.DAO
{
// Data-access helper for staff: CRUD on NHANVIEN plus their TAIKHOAN rows,
// via stored procedures, and a joined listing for the staff grid.
public class NhanVienDAO
{
    private static NhanVienDAO instances;
    public NhanVienDAO() { }
    // Lazily-created singleton accessor.
    public static NhanVienDAO Instances
    {
        get
        {
            if (instances == null) instances = new NhanVienDAO(); return instances;
        }
        set => instances = value;
    }
    // Inserts a staff member; always reports success (errors surface as exceptions).
    public bool ThemNhanVien(EC_NHANVIEN ec)
    {
        string query = " execute ThemNhanVien_thinh @TENNHANVIEN , @GIOITINH , @NGAYSINH , @DIACHI , @MACHUCVU , @SODIENTHOAI";
        DataProvider.Instance.ExecuteQuery(query, new object[] {ec.TenNhanVien,ec.GioiTinh,ec.NgaySinh,ec.DiaChi,ec.MaChucVu,ec.SDT});
        return true;
    }
    // Inserts the account row for a staff member.
    public bool ThemTK(EC_TAIKHOAN ec)
    {
        string query = "execute ThemTK_thinh @PASS , @MACHUCVU , @MANHANVIEN";
        DataProvider.Instance.ExecuteQuery(query, new object[] { ec.MatKhau,ec.MaChucVu,ec.MaNhanVien});
        return true;
    }
    // Deletes a staff member by id.
    public bool XoaNhanVien(EC_NHANVIEN ec)
    {
        string query = "execute XoaNhanVien_thinh @Manv";
        DataProvider.Instance.ExecuteQuery(query, new object[] {ec.MaNhanVien });
        return true;
    }
    // Updates a staff member identified by MaNhanVien.
    public bool SuaNhanVien(EC_NHANVIEN ec)
    {
        string query = " execute SuaNhanVien_thinh @MANHANVIEN , @TENNHANVIEN , @GIOITINH , @NGAYSINH , @DIACHI , @MACHUCVU , @SODIENTHOAI";
        DataProvider.Instance.ExecuteQuery(query, new object[] {ec.MaNhanVien, ec.TenNhanVien, ec.GioiTinh, ec.NgaySinh, ec.DiaChi, ec.MaChucVu, ec.SDT });
        return true;
    }
    // Updates the password of a staff member's account.
    public bool SuaTK(EC_TAIKHOAN ec)
    {
        string query = "EXECUTE SuaTK_thinh @PASS , @MANHANVIEN";
        DataProvider.Instance.ExecuteQuery(query, new object[] { ec.MatKhau,ec.MaNhanVien });
        return true;
    }
    // Returns the staff/account/position join, with an optional SQL tail
    // (e.g. a WHERE clause) appended verbatim.
    // SECURITY NOTE(review): Dieukien is concatenated into the SQL text, so
    // any user-supplied content in it allows SQL injection — callers must
    // pass only trusted, hard-coded fragments (parameterize if this ever
    // takes user input).
    public DataTable Taobang(string Dieukien)
    {
        string query = " SELECT NHANVIEN.MANHANVIEN, NHANVIEN.SODIENTHOAI, NHANVIEN.TENNHANVIEN, NHANVIEN.GIOITINH, NHANVIEN.NGAYSINH, NHANVIEN.DIACHI, TAIKHOAN.TENTAIKHOAN, TAIKHOAN.PASS, CHUCVU.TENCHUCVU FROM NHANVIEN INNER JOIN TAIKHOAN ON NHANVIEN.MANHANVIEN = TAIKHOAN.MANHANVIEN INNER JOIN CHUCVU ON NHANVIEN.MACHUCVU = CHUCVU.MACHUCVU AND TAIKHOAN.MACHUCVU = CHUCVU.MACHUCVU" + Dieukien;
        DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
        return dt;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
/// <summary>
/// Data-access helper for account authentication.
/// </summary>
public class TaiKhoanDAO
{
    private static TaiKhoanDAO instance;

    // Lazily-created singleton; external code can only read it.
    public static TaiKhoanDAO Instance
    {
        get
        {
            if (instance == null) instance = new TaiKhoanDAO();
            return instance;
        }
        private set { instance = value; }
    }

    private TaiKhoanDAO() { }

    // True when the user-name/password pair matches a stored account
    // (parameterized stored-procedure call, so no injection risk here).
    public bool Login(string tentaikhoan, string matkhau)
    {
        string query = "EXECUTE DBO.USP_LOGIN @TENTAIKHOAN , @MATKHAU";
        DataTable ketQua = DataProvider.Instance.ExecuteQuery(query, new object[] { tentaikhoan, matkhau });
        return ketQua.Rows.Count > 0;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Snapshot of per-table change counters, used to detect whether any table in
/// the database has been modified since the last poll.
/// </summary>
public class KiemTraSuThayDoiDuLieu
{
    public int ChucVu { get; set; }
    public int DatDichVu { get; set; }
    public int DatPhong { get; set; }
    public int DichVu { get; set; }
    public int HoaDon { get; set; }
    public int KhachHang { get; set; }
    public int LichLamViec { get; set; }
    public int LoaiPhong { get; set; }
    public int NhanVien { get; set; }
    public int Phong { get; set; }
    public int TaiKhoan { get; set; }
    public int ChiTietHoaDon { get; set; }

    // Reads every counter column of the change-tracking query row.
    public KiemTraSuThayDoiDuLieu(DataRow row)
    {
        ChiTietHoaDon = int.Parse(row["CHITIETHOADON"].ToString());
        ChucVu = int.Parse(row["CHUCVU"].ToString());
        DatDichVu = int.Parse(row["DatDichVu"].ToString());
        DatPhong = int.Parse(row["DatPhong"].ToString());
        DichVu = int.Parse(row["DichVu"].ToString());
        HoaDon = int.Parse(row["HOADON"].ToString());
        KhachHang = int.Parse(row["KHACHHANG"].ToString());
        LichLamViec = int.Parse(row["LICHLAMVIEC"].ToString());
        LoaiPhong = int.Parse(row["LOAIPHONG"].ToString());
        NhanVien = int.Parse(row["NHANVIEN"].ToString());
        Phong = int.Parse(row["PHONG"].ToString());
        TaiKhoan = int.Parse(row["TAIKHOAN"].ToString());
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Entity for one shift of a staff member's work schedule.
/// </summary>
public class EC_LichLamViec
{
    public string MaLichLamViec { get; set; }
    public DateTime NgayLamViec { get; set; }
    public string Buoi { get; set; }
    public string MaNhanVien { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Entity carrying customer fields between the UI and the DAO layer.
/// </summary>
public class EC_KHACHHANG
{
    public string MaKH { get; set; }
    public string TenKH { get; set; }
    public DateTime Ngaysinh { get; set; }
    public string GioiTinh { get; set; }
    public string DiaChi { get; set; }
    public string SDT { get; set; }
    public string SOCMT { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using System.Data.Sql;
using System.Data.SqlClient;
namespace QuanLiKhachSan
{
/// <summary>
/// Login form: the user supplies a SQL Server name plus account/password; on
/// a successful connectivity check and credential check the main form opens.
/// </summary>
public partial class fLogin : Form
{
    private static string laytenserver;
    public static fLogin instances;
    private static string taiKhoan;
    private static string matKhau;

    // Account name of the user who logged in, read by other forms.
    public static string tentk { get; set; }

    // Lazily-created singleton accessor.
    public fLogin Instances
    {
        get
        {
            if (instances == null) instances = new fLogin();
            return instances;
        }
        set
        {
            instances = value;
        }
    }

    public static string TaiKhoan { get => taiKhoan; set => taiKhoan = value; }
    public static string MatKhau { get => matKhau; set => matKhau = value; }
    public static string Laytenserver { get => laytenserver; set => laytenserver = value; }

    public fLogin()
    {
        InitializeComponent();
    }

    // "Log in" button: validates the server name, then the connection, then
    // the credentials, and finally opens fManager.
    private void btnDangNhap_Click(object sender, EventArgs e)
    {
        if (txtTenSV.Text == "")
        {
            MessageBox.Show("Ban Chua Nhap SeverName");
            return;
        }
        Laytenserver = txtTenSV.Text;
        string a = txtDangNhap.Text;
        string b = txtMatKhau.Text;
        TaiKhoan = txtDangNhap.Text;
        MatKhau = txtMatKhau.Text;
        if (kiemtraketnoiok() == false)
        {
            MessageBox.Show("Bạn Đã Nhập Sai Tên Tài Khoản Hoặc Mật Khẩu Hoặc SeverName");
            return;
        }
        if (Login(a, b))
        {
            fManager phanmem = new fManager();
            this.Hide();
            tentk = txtDangNhap.Text;
            phanmem.ShowDialog();
            this.Show();
        }
        else
        {
            MessageBox.Show("Bạn Đã Nhập Sai Tên Tài Khoản Hoặc Mật Khẩu");
        }
    }

    // Tries to open a SQL connection with the entered server/credentials.
    // Fix 1: the password was never appended to the connection string (a
    // broken placeholder token sat where the local 'matkhau' belongs), so the
    // check could not authenticate as intended.
    // Fix 2: on success the opened connection leaked; it is now disposed in
    // every path via 'using'.
    bool kiemtraketnoiok()
    {
        string sever = Laytenserver;
        string id = TaiKhoan;
        string matkhau = MatKhau;
        string ConnectSTR = @"Server=" + @sever + ";Database=QuanLiKhachSan;User Id=" + id + ";Password=" + matkhau;
        using (SqlConnection connection = new SqlConnection(ConnectSTR))
        {
            try
            {
                connection.Open();
            }
            catch
            {
                return false;
            }
        }
        return true;
    }

    // Delegates the credential check to the DAO layer.
    bool Login(string tentaikhoan, string matkhau)
    {
        return TaiKhoanDAO.Instance.Login(tentaikhoan, matkhau);
    }

    private void btnThoat_Click(object sender, EventArgs e)
    {
        Application.Exit();
    }

    // Asks for confirmation before letting the application close.
    private void fLogin_FormClosing(object sender, FormClosingEventArgs e)
    {
        if (MessageBox.Show("Bạn có thật sự muốn thoát chương trình?", "Thông báo", MessageBoxButtons.OKCancel) != System.Windows.Forms.DialogResult.OK)
        {
            e.Cancel = true;
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
/// <summary>
/// Entity for a room type: code, display name and the equipment it includes.
/// Properties are read-only outside the class.
/// </summary>
public class LoaiPhong
{
    private object item;

    public string MA { get; private set; }
    public string TenLoaiPhong { get; private set; }
    public string ThietBi { get; private set; }

    public LoaiPhong(string mA, string tenLoaiPhong, string thietBi)
    {
        MA = mA;
        TenLoaiPhong = tenLoaiPhong;
        ThietBi = thietBi;
    }

    // Builds a room type from the columns of a LOAIPHONG query row.
    public LoaiPhong(DataRow row)
        : this(row["MALOAIPHONG"].ToString(), row["TENLOAIPHONG"].ToString(), row["THIETBI"].ToString())
    {
    }

    public LoaiPhong(object item)
    {
        this.item = item;
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
// Entity for an invoice row (HOADON table).
class HOADONDTO
{
    public string MAHD {get;set;}
    public DateTime NGAYTHANHTOAN {get;set;}
    public string MAKH {get;set;}
    public string MANHANVIEN {get;set;}
    public float TIENPHONG { get; set; }
    public float TIENDV { get; set; }

    // Builds an invoice from the columns of a HOADON query row.
    public HOADONDTO(DataRow row)
    {
        this.MAHD = row["MAHD"].ToString();
        this.NGAYTHANHTOAN =(DateTime) row["NGAYTHANHTOAN"];
        this.MAKH = row["MAKH"].ToString();
        this.MANHANVIEN = row["MANHANVIEN"].ToString();
        this.TIENPHONG = (float)row["TIENPHONG"];
        // Fix: the original assigned TIENPHONG twice (copy-paste), so TIENDV
        // was never populated and TIENPHONG ended up holding the service total.
        this.TIENDV = (float)row["TIENDV"];
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
/// <summary>
/// Entity carrying staff-member fields between the UI and the DAO layer.
/// </summary>
public class EC_NHANVIEN
{
    public string MaNhanVien { get; set; }
    public string TenNhanVien { get; set; }
    public string GioiTinh { get; set; }
    public string MaChucVu { get; set; }
    public string SDT { get; set; }
    public DateTime NgaySinh { get; set; }
    public string DiaChi { get; set; }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Admin screen: CRUD for employees (NHANVIEN) plus the login account
    /// (TAIKHOAN) that is created/updated alongside each employee.
    /// Controls (txtManv, grvDanhSachNhanVien, ...) come from the designer file.
    /// </summary>
    public partial class fDanhSachNhanVien : Form
    {
        public fDanhSachNhanVien()
        {
            InitializeComponent();
        }
        // Shared DTOs; their fields are overwritten before every DB call.
        EC_NHANVIEN ec = new EC_NHANVIEN();
        EC_TAIKHOAN tk = new EC_TAIKHOAN();
        /// <summary>Re-binds the employee grid. "where" is forwarded verbatim
        /// to the DAO ("" = no filter).</summary>
        public void Hienthi(string where)
        {
            grvDanhSachNhanVien.DataSource = NhanVienDAO.Instances.Taobang(where);
        }
        // Resets every input control to its default state.
        void setnull()
        {
            txtManv.Text = "";
            txtTennv.Text = "";
            cbGioitinh.Text = "Nam";
            cbMaChucvu.Text = "";
            txtDaichi.Text = "";
            txtSDT.Text = "";
            txtTK.Text = "";
            txtMK.Text = "";
        }
        /// <summary>
        /// Inserts a new employee and its account. The employee id and the
        /// account name are DB-generated (txtManv/txtTK are kept empty/disabled).
        /// </summary>
        private void btnThem_Click(object sender, EventArgs e)
        {
            txtManv.Text = "";
            txtTK.Text = "";
            if (txtTennv.Text == "" || cbMaChucvu.Text == "")
            {
                MessageBox.Show("Hãy nhập đầy đủ thông tin!!!");
                return;
            }
            else
            {
                try
                {
                    ec.MaNhanVien = txtManv.Text;
                    ec.TenNhanVien = txtTennv.Text;
                    ec.NgaySinh = dtNgaysinh.Value;
                    ec.SDT = txtSDT.Text;
                    ec.GioiTinh = cbGioitinh.Text;
                    ec.MaChucVu = cbMaChucvu.Text;
                    ec.DiaChi = txtDaichi.Text;
                    tk.MatKhau = txtMK.Text;
                    tk.MaChucVu = cbMaChucvu.Text;
                    NhanVienDAO.Instances.ThemNhanVien(ec);
                    // Fetch the id the insert just generated so the account
                    // row can reference the new employee.
                    string query = " execute LayMANV_max";
                    DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
                    string ma = dt.Rows[0][0].ToString().Trim();
                    tk.MaNhanVien = ma;
                    NhanVienDAO.Instances.ThemTK(tk);
                    // NOTE(review): tk.TenTaiKhoan is never set here — presumably
                    // generated inside the ThemTK stored procedure; confirm.
                    MessageBox.Show("Thực hiện thành công!!!");
                }
                catch
                {
                    // NOTE(review): swallows the exception detail; consider logging.
                    MessageBox.Show("Lỗi!!!");
                    return;
                }
            }
            setnull();
            Hienthi("");
        }
        /// <summary>Deletes the employee whose id is in txtManv.</summary>
        private void btnXoa_Click(object sender, EventArgs e)
        {
            if(txtManv.Text == "")
            {
                MessageBox.Show("Nhập hoặc chọn mã nhân viên cần xóa!!!");
                return;
            }
            else
            {
                try
                {
                    ec.MaNhanVien = txtManv.Text;
                    NhanVienDAO.Instances.XoaNhanVien(ec);
                    MessageBox.Show("Xóa thành công!!!");
                }
                catch
                {
                    MessageBox.Show("Lỗi!!!");
                    return;
                }
            }
            setnull();
            Hienthi("");
        }
        /// <summary>Updates both the employee row and its account row from
        /// the current input controls.</summary>
        private void btnSua_Click(object sender, EventArgs e)
        {
            if (txtManv.Text == "")
            {
                MessageBox.Show("Hãy chọn hàng cần sửa!!!");
                return;
            }
            else
            {
                try
                {
                    ec.MaNhanVien = txtManv.Text;
                    ec.TenNhanVien = txtTennv.Text;
                    ec.NgaySinh = dtNgaysinh.Value;
                    ec.SDT = txtSDT.Text;
                    ec.GioiTinh = cbGioitinh.Text;
                    ec.MaChucVu = cbMaChucvu.Text;
                    ec.DiaChi = txtDaichi.Text;
                    tk.MaNhanVien = txtManv.Text;
                    tk.TenTaiKhoan = txtTK.Text;
                    tk.MatKhau = txtMK.Text;
                    NhanVienDAO.Instances.SuaTK(tk);
                    NhanVienDAO.Instances.SuaNhanVien(ec);
                    MessageBox.Show("Thực hiện thành công!!!");
                }
                catch
                {
                    MessageBox.Show("Lỗi!!!");
                    return;
                }
            }
            setnull();
            Hienthi("");
        }
        // Phone field: digits (and control keys such as Backspace) only.
        private void txtSDT_KeyPress(object sender, KeyPressEventArgs e)
        {
            if (!Char.IsDigit(e.KeyChar) && !Char.IsControl(e.KeyChar))
                e.Handled = true;
        }
        /// <summary>
        /// Copies the clicked grid row into the input controls.
        /// Column indices are positional: 0=id, 1=phone, 2=name, 3=gender,
        /// 4=birth date, 5=address, 6=account, 7=password, 8=role name.
        /// </summary>
        private void grvDanhSachNhanVien_RowHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e)
        {
            txtManv.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[0].Value.ToString();
            txtTennv.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[2].Value.ToString();
            cbGioitinh.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[3].Value.ToString();
            dtNgaysinh.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[4].Value.ToString();
            txtDaichi.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[5].Value.ToString();
            string Tenchucvu = grvDanhSachNhanVien.SelectedRows[0].Cells[8].Value.ToString();
            // NOTE(review): role name -> role id mapping is hard-coded; breaks
            // if a third role is ever added to the CHUCVU table.
            if (Tenchucvu == "NHANVIEN")
            {
                cbMaChucvu.Text = "CV0001";
            }
            else cbMaChucvu.Text = "CV0002";
            txtSDT.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[1].Value.ToString();
            txtTK.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[6].Value.ToString();
            txtMK.Text = grvDanhSachNhanVien.SelectedRows[0].Cells[7].Value.ToString();
        }
        /// <summary>Initial load: fills the grid, clears inputs, and populates
        /// the role combo box from the CHUCVU table. Id/account boxes are
        /// read-only because those values are DB-generated.</summary>
        public void fDanhSachNhanVien_Load(object sender, EventArgs e)
        {
            Hienthi("");
            setnull();
            txtManv.Enabled = false;
            txtTK.Enabled = false;
            string query = " SELECT MACHUCVU FROM CHUCVU";
            DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
            for (int i = 0; i < dt.Rows.Count; i++)
            {
                cbMaChucvu.Items.Add(dt.Rows[i]["MACHUCVU"].ToString().Trim());
            }
        }
        // The handlers below allow only letters, digits, separators and Backspace.
        private void txtTennv_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
        }
        private void txtDaichi_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
        }
        private void cbGioitinh_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
        }
        private void txtTK_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
        }
        private void txtMK_KeyPress(object sender, KeyPressEventArgs e)
        {
            e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using QuanLiKhachSan.DTO;
using System.Data.SqlClient;
namespace QuanLiKhachSan.DAO
{
public class KhachHangDAO_thinh
{
private static KhachHangDAO_thinh instances;
public static KhachHangDAO_thinh Instances { get {
if (instances == null) instances = new KhachHangDAO_thinh();return instances;
} set => instances = value; }
public KhachHangDAO_thinh() { }
public bool ThemKhachHangVaoDanhSach(EC_KHACHHANG ec, string insert)
{
string query = insert;
DataProvider.Instance.ExecuteQuery(query,new object[] { ec.TenKH,ec.GioiTinh,ec.Ngaysinh,ec.DiaChi,ec.SDT,ec.SOCMT });
return true;
}
public DataTable Taobang(string Dieukien)
{
string query = Dieukien;
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
return dt;
}
public bool XoaKhachHang(EC_KHACHHANG ec)
{
string query = "execute XoaKhachHang @Makh";
DataProvider.Instance.ExecuteQuery(query, new object[] { ec.MaKH });
return true;
}
public bool SuaKhachHangVaoDanhSach(EC_KHACHHANG ec)
{
string query = " execute SuaKhachHang @makh , @TENKH , @GIOITINH , @NGAYSINH , @DIACHI , @SODIENTHOAI , @CHUNGMINHTHU ";
DataProvider.Instance.ExecuteQuery(query, new object[] {ec.MaKH,ec.TenKH,ec.GioiTinh,ec.Ngaysinh,ec.DiaChi,ec.SDT,ec.SOCMT });
return true;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Room-booking screen: registers customers, books rooms (immediate stay
    /// or future hold), shows room/booking lists and an occupancy summary.
    /// Controls (cbbLoaiPhong, grVDatPhong, ...) come from the designer file.
    /// Convention from the DAO layer: a booking whose NgayDi equals the
    /// DateTimePicker's MaxDate means "departure date unknown".
    /// </summary>
    public partial class fDatPhong : Form
    {
        public fDatPhong()
        {
            InitializeComponent();
        }
        // Fills the booking-id combo box from the bookings table.
        private void LoadDanhSachDatPhongVaocbb()
        {
            List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhongVaocbb();
            cbbMaDatPhong.DataSource = danhsachdatphong;
            cbbMaDatPhong.DisplayMember = "MaDatPhong";
        }
        // Fills the room combo with available rooms of the given type and shows
        // the first room's price.
        private void LoadTenPhongTheoLoaiPhong(string maLoaiPhong)
        {
            List<Phong> danhsachphongtheoma = PhongDAO.Instances.LoadDanhSachTheoMavaTinhTrang(maLoaiPhong);
            cbbChonPhong.DataSource = danhsachphongtheoma;
            // NOTE(review): throws NullReferenceException if the type has no
            // available rooms (SelectedItem is null).
            txtGiaPhong.Text = (cbbChonPhong.SelectedItem as Phong).GiaPhong.ToString();
            cbbChonPhong.DisplayMember = "MA";
        }
        // Fills the room-type combo box.
        private void LoadLoaiPhongVaocbb()
        {
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            cbbLoaiPhong.DataSource = danhsachloaiphong;
            cbbLoaiPhong.DisplayMember = "TenLoaiPhong";
        }
        // Rebuilds the occupancy summary panel: one button per room state
        // (1 = free, 2 = reserved for the future, 3 = reserved/occupied now).
        private void LoadDanhSachThongKeTinhTrangPhong()
        {
            flpThongKe.Controls.Clear();
            List<TableHienThiThongKeTinhTrangPhong> DanhSachThongKe = PhongDAO.Instances.LoadDanhSachThongKePhong();
            foreach (var item in DanhSachThongKe)
            {
                Button hienthithongke = new Button() { Height = 70, Width = 90 };
                flpThongKe.Controls.Add(hienthithongke);
                flpThongKe.AutoScroll = true;
                int a = 10; // NOTE(review): unused local — dead code.
                if (item.TinhTrangPhong == 1)
                {
                    hienthithongke.Text = "Phòng Trống" + " Số Lượng " + item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.White;
                }
                if (item.TinhTrangPhong == 2)
                {
                    hienthithongke.Text = "Phòng được đặt ở tương lai \n Số Lượng:" + item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.Red;
                }
                if (item.TinhTrangPhong == 3)
                {
                    hienthithongke.Text = "Phòng Có Người Đặt \n Hoặc Đang Ở \n Số Lượng:" + item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.LightSlateGray;
                }
            }
        }
        // Binds the full room list to its grid.
        private void LoadDanhSachPhong()
        {
            List<Phong> danhSachPhongs = PhongDAO.Instances.LoadDanhSach();
            grVDanhSachPhong.DataSource = danhSachPhongs;
        }
        // Binds the full booking list to its grid.
        private void LoadDanhSachDatPhong()
        {
            List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
            grVDatPhong.DataSource = danhsachdatphong;
        }
        /// <summary>
        /// Saves a new customer after validating that the CMT (id-card number)
        /// is not already registered and the phone number is numeric.
        /// </summary>
        private void btnLuu_Click(object sender, EventArgs e)
        {
            string tenKh = txtTenKhachHang.Text;
            string gioiTinh = "";
            string cmt = txtCMT.Text;
            KhachHang_huy khachHang = KhachHangDAO_huy.Instances.TimKHTheoCMND(txtCMT.Text);
            if (khachHang != null)
            {
                MessageBox.Show("Đã Tồn Tại CMT " + txtCMT.Text + " Trong Hệ Thống");
                return;
            }
            if (chkBNam.Checked)
            {
                gioiTinh = "Nam";
            }
            if (chkNu.Checked)
            {
                gioiTinh = "Nu";
            }
            DateTime ngaySinh = dateTimePicker1.Value;
            string diaChi = txtDiaChi.Text;
            int a = 10;
            int soDienThoai = 0;
            // TryParse writes the parsed value into "a"; the later int.Parse is
            // redundant but harmless.
            if (int.TryParse(txtSoDienThoai.Text, out a))
            {
                soDienThoai = int.Parse(txtSoDienThoai.Text);
            }
            else
            {
                MessageBox.Show("bạn phải nhập số điện thoại đúng dạng số");
                return;
            }
            if (KhachHangDAO_huy.Instances.ThemKhachHangVaoDanhSach(tenKh, gioiTinh, ngaySinh, diaChi, soDienThoai, cmt))
            {
                MessageBox.Show("Lưu Khách Hàng " + tenKh + " thành công");
            }
        }
        // Room-type selection changed: reload the room combo for that type.
        private void cbbLoaiPhong_SelectedIndexChanged(object sender, EventArgs e)
        {
            string maLoaiPhong;
            ComboBox cb = sender as ComboBox;
            if (cb.SelectedItem == null) return;
            LoaiPhong loaiPhong = cb.SelectedItem as LoaiPhong;
            maLoaiPhong = loaiPhong.MA;
            LoadTenPhongTheoLoaiPhong(maLoaiPhong);
        }
        // Room selection changed: refresh the displayed price.
        private void cbbChonPhong_SelectedIndexChanged(object sender, EventArgs e)
        {
            txtGiaPhong.Clear();
            txtGiaPhong.Text = (cbbChonPhong.SelectedItem as Phong).GiaPhong.ToString();
        }
        // Room id of the booking row currently selected in the grid.
        private string maphong;
        // Reselects room index b within the rooms of the given type and shows
        // the price of the remembered room (field "maphong").
        private void LoadPhongVaocbbPhong(int b, string maloaiphong)
        {
            List<Phong> danhsachphongtheoloaiphong = PhongDAO.Instances.LoadDanhSachTheoMaLoaiPhong(maloaiphong);
            List<Phong> danhsachphongtheomaphong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            txtGiaPhong.Text = danhsachphongtheomaphong[0].GiaPhong.ToString();
            cbbChonPhong.DataSource = danhsachphongtheoloaiphong;
            cbbChonPhong.SelectedIndex = b;
            cbbChonPhong.DisplayMember = "MA";
        }
        // Reselects room-type index a in the room-type combo.
        private void LoadLoaiPhongVaocbbLoaiPhong(int a)
        {
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            cbbLoaiPhong.DataSource = danhsachloaiphong;
            cbbLoaiPhong.SelectedIndex = a;
            cbbLoaiPhong.DisplayMember = "TENLOAIPHONG";
        }
        // Returns the index of the given room's type within the type list
        // (0 if not found).
        private int TimKiemLoaiPhongVuaChon(string maphong, string maloaiphong)
        {
            int dem = 0;
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            List<Phong> phong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            for (int i = 0; i < danhsachloaiphong.Count; i++)
            {
                if (danhsachloaiphong[i].MA == phong[0].MaLoaiPhong)
                {
                    dem = i;
                }
            }
            return dem;
        }
        // Returns the index of the given room within its type's room list
        // (0 if not found).
        private int TimKiemPhongVuaChon(string maphong, string maloaiphong)
        {
            int dem = 0;
            List<Phong> danhsachphong = PhongDAO.Instances.LoadDanhSachTheoMaLoaiPhong(maloaiphong);
            List<Phong> phong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            for (int i = 0; i < danhsachphong.Count; i++)
            {
                if (danhsachphong[i].MA == phong[0].MA)
                {
                    dem = i;
                }
            }
            return dem;
        }
        /// <summary>
        /// Creates a booking after validating the customer, the dates and room
        /// availability. Two paths: stay starting today with unknown departure
        /// ("ở liền"), or a dated reservation ("giữ phòng").
        /// </summary>
        private void btnDatPhong_Click(object sender, EventArgs e)
        {
            KhachHang_huy khachHang = KhachHangDAO_huy.Instances.TimKHTheoCMND(txtBChungMinhThu.Text);
            if (khachHang == null)
            {
                MessageBox.Show("Bạn chưa lưu thông tin khách hàng có số chứng minh " + txtBChungMinhThu.Text);
                return;
            }
            string maKH = khachHang.MaKH;
            string maPhong = (cbbChonPhong.SelectedItem as Phong).MA;
            float a = 100;
            float traTruoc;
            DateTime ngayDi;
            DateTime ngayO = dtpNgayO.Value.Date;
            // NOTE(review): the ToString(...) results below are discarded —
            // DateTime is immutable, so these calls are no-ops.
            ngayO.Date.ToString("MM/dd/yyyy");
            // NOTE(review): compares Value (with time) to MaxDate here, but
            // Value.Date to MaxDate.Date further down — inconsistent.
            if (dtpNgayDi.Value == dtpNgayDi.MaxDate)
            {
                ngayDi = dtpNgayDi.MaxDate;
                ngayDi.Date.ToString("");
            }
            else
            {
                ngayDi = dtpNgayDi.Value.Date;
                ngayDi.Date.ToString("MM/dd/yyyy");
            }
            if (DateTime.Compare(ngayO, ngayDi) > 0)
            {
                MessageBox.Show("Ngày Đi Không Được Phép Nhỏ Hơn Ngày Ở");
                return;
            }
            if (DateTime.Compare(ngayO.Date, DateTime.Now.Date) < 0)
            {
                MessageBox.Show("Bạn không thể đặt phòng tại 1 thời điểm ở quá khứ");
                return;
            }
            if (float.TryParse(txtTraTruoc.Text, out a) == false)
            {
                MessageBox.Show("Bạn Phải Chọn Ngày Đi Ít Nhất 1 Lần");
                return;
            }
            traTruoc = float.Parse(txtTraTruoc.Text);
            // Reject overlap with any existing hold on the same room.
            List<DatPhong> danhsachcacphongdangduocgiu = DatPhongDAO.Instances.HienThiDanhSachDatPhongCoCacPhongDuocGiu();
            foreach (var item in danhsachcacphongdangduocgiu)
            {
                if (item.MaPhong == (cbbChonPhong.SelectedItem as Phong).MA)
                {
                    if (dtpNgayO.Value.Date >= item.NgayO.Date && dtpNgayO.Value.Date <= item.NgayDi.Value.Date
                        || dtpNgayDi.Value.Date >= item.NgayO.Date && dtpNgayDi.Value.Date <= item.NgayDi.Value.Date
                        )
                    {
                        MessageBox.Show("Phòng đã bị sử dụng vào thời gian " + item.NgayO.Date + " Tới " + item.NgayDi.Value.Date);
                        return;
                    }
                }
            }
            // Path 1: check-in today, departure unknown (NgayDi == MaxDate).
            if (dtpNgayO.Value.Date == DateTime.Now.Date && dtpNgayDi.Value.Date == dtpNgayDi.MaxDate.Date)
            {
                DatPhongDAO.Instances.ThemDatPhongOLien(maPhong, maKH, traTruoc, ngayO, ngayDi);
                MessageBox.Show("Đã Đăng Kí, Ở Luôn Và Chưa Biết Ngày Đi");
                LoadDanhSachDatPhong();
                LoadDanhSachPhong();
                LoadTenPhongTheoLoaiPhong((cbbLoaiPhong.SelectedItem as LoaiPhong).MA);
                LoadDanhSachDatPhongVaocbb();
                return;
            }
            // Path 2: a dated reservation (hold); an unknown departure is only
            // allowed when the stay starts today.
            if (dtpNgayO.Value.Date >= DateTime.Now.Date)
            {
                if (dtpNgayO.Value.Date != DateTime.Now.Date && dtpNgayDi.Value.Date == dtpNgayDi.MaxDate)
                {
                    MessageBox.Show("Bạn Không Thể Giữ Phòng Khi Không Xác Định Ngày Đi");
                    return;
                }
                DatPhongDAO.Instances.ThemDatPhongVaGiuPhong(maPhong, maKH, traTruoc, ngayO, ngayDi);
                MessageBox.Show("Đã đăng kí và Giữ Phòng");
                LoadDanhSachDatPhong();
                LoadDanhSachPhong();
                LoadTenPhongTheoLoaiPhong((cbbLoaiPhong.SelectedItem as LoaiPhong).MA);
                LoadDanhSachDatPhongVaocbb();
                return;
            }
        }
        /// <summary>
        /// Recomputes the 30% deposit when the departure date changes:
        /// unknown departure -> 5 nights' deposit; same-day -> 1 night;
        /// otherwise 30% of (price * number of nights).
        /// </summary>
        private void dtpNgayDi_ValueChanged(object sender, EventArgs e)
        {
            TimeSpan tinhngay;
            float a;
            // NOTE(review): Value.Date is compared against MaxDate (which has a
            // time component) — likely never equal; confirm against the check-in
            // flow in btnDatPhong_Click which uses MaxDate.Date.
            if (dtpNgayDi.Value.Date == dtpNgayDi.MaxDate)
            {
                a = (cbbChonPhong.SelectedItem as Phong).GiaPhong * 30 / 100*5;
                txtTraTruoc.Clear();
                txtTraTruoc.Text = a.ToString();
                return;
            }
            if (dtpNgayDi.Value.Date == dtpNgayO.Value.Date)
            {
                a = (cbbChonPhong.SelectedItem as Phong).GiaPhong*30/100;
                txtTraTruoc.Clear();
                txtTraTruoc.Text = a.ToString();
                return;
            }
            if (dtpNgayDi.Value.Date > dtpNgayO.Value.Date)
            {
                tinhngay = dtpNgayDi.Value.Date - dtpNgayO.Value.Date;
                a = ((cbbChonPhong.SelectedItem as Phong).GiaPhong * tinhngay.Days) * 30 / 100;
                txtTraTruoc.Clear();
                txtTraTruoc.Text = a.ToString();
            }
        }
        // True when the customer id appears in the booking with the given id.
        private bool KiemTraPhaiKhachHangVoiMaDPTuongUng(string maKH,string maDP)
        {
            List<DatPhong> datPhongs = DatPhongDAO.Instances.HienThiDanhSachDatPhong(maDP);
            foreach (var item in datPhongs)
            {
                if (item.MaKH == maKH)
                {
                    return true;
                }
            }
            return false;
        }
        // True when the room already appears in the booking with the given id.
        // NOTE(review): maDV/NgayO/NgayDi parameters are accepted but unused.
        private bool KiemTraTrungPhong(string MaPhong, string maDP,string maDV,DateTime NgayO,DateTime NgayDi)
        {
            List<DatPhong> datPhongs = DatPhongDAO.Instances.HienThiDanhSachDatPhong(maDP);
            foreach (var item in datPhongs)
            {
                if (item.MaPhong == MaPhong)
                {
                    return true;
                }
            }
            return false;
        }
        // True when some row of the booking has the given check-in date.
        private bool KiemTraDungNgayO(DateTime ngayO, string maDP)
        {
            List<DatPhong> datPhongs = DatPhongDAO.Instances.HienThiDanhSachDatPhong(maDP);
            foreach (var item in datPhongs)
            {
                if (item.NgayO.Date == ngayO.Date)
                {
                    return true;
                }
            }
            return false;
        }
        // Deletes the whole booking selected in the booking-id combo.
        private void btnXoaDatPhong_Click(object sender, EventArgs e)
        {
            if (cbbMaDatPhong.Items.Count == 0)
            {
                MessageBox.Show("Chưa Có Mã Đặt Phòng Nào Được Tạo");
                return;
            }
            MessageBox.Show("Bạn Đã Xóa Đặt Phòng Có Mã " + (cbbMaDatPhong.SelectedItem as DatPhong).MaDatPhong);
            DatPhongDAO.Instances.XoaToanBoDatPhongTheoMaDatPhong((cbbMaDatPhong.SelectedItem as DatPhong).MaDatPhong);
            LoadDanhSachDatPhong();
            LoadDanhSachPhong();
            LoadDanhSachDatPhongVaocbb();
        }
        /// <summary>
        /// Grid row clicked: tags every row with its DatPhong, then copies the
        /// selected booking back into the editing controls (dates, room type,
        /// room, deposit, customer's id-card number).
        /// </summary>
        private void grVDatPhong_CellMouseClick(object sender, DataGridViewCellMouseEventArgs e)
        {
            List<DatPhong> danhsachdatphong = DatPhongDAO.Instances.HienThiDanhSachDatPhong();
            int i = 0;
            foreach (var item in danhsachdatphong)
            {
                grVDatPhong.Rows[i].Tag = item;
                i++;
            }
            foreach (DataGridViewRow item in grVDatPhong.Rows)
            {
                DatPhong datPhong = item.Tag as DatPhong;
                if (item.Selected)
                {
                    foreach (var item1 in cbbMaDatPhong.Items)
                    {
                        if (datPhong.MaDatPhong == (item1 as DatPhong).MaDatPhong)
                        {
                            cbbMaDatPhong.SelectedItem = item1;
                        }
                    }
                    // Empty NgayDi means "departure unknown" -> show MaxDate.
                    if (datPhong.NgayDi.ToString() == "")
                        dtpNgayDi.Value = dtpNgayDi.MaxDate;
                    else
                    {
                        dtpNgayDi.Value = (DateTime)datPhong.NgayDi;
                    }
                    dtpNgayO.Value = datPhong.NgayO;
                    maphong = datPhong.MaPhong;
                    Phong phong = PhongDAO.Instances.LoadPhongTheoMaPhong(datPhong.MaPhong);
                    int IndexLoaiPhong = TimKiemLoaiPhongVuaChon(phong.MA, phong.MaLoaiPhong);
                    int IndexPhong = TimKiemPhongVuaChon(phong.MA, phong.MaLoaiPhong);
                    LoadLoaiPhongVaocbbLoaiPhong(IndexLoaiPhong);
                    LoadPhongVaocbbPhong(IndexPhong, phong.MaLoaiPhong);
                    txtTraTruoc.Text = datPhong.TraTruoc.ToString();
                    //for (int b = 0; b < cbbDichVu.Items.Count; b++)
                    //{
                    //    DichVu dichVu = cbbDichVu.Items[b] as DichVu;
                    //    if (dichVu.MaDV == datPhong.MaDV)
                    //        cbbDichVu.SelectedIndex = b;
                    //}
                    //txtSoPhan.Text = datPhong.SoLuong.ToString();
                    KhachHang_huy khachHang_Huy = KhachHangDAO_huy.Instances.TimKHTheoMAKH(datPhong.MaKH);
                    txtBChungMinhThu.Text = khachHang_Huy.ChungMinhThu;
                }
            }
        }
        /// <summary>Initial load: populates every grid, the summary panel and
        /// the combo boxes. Also invoked by fManager on tab switches.</summary>
        public void fDatPhong_Load(object sender, EventArgs e)
        {
            LoadDanhSachDatPhong();
            LoadDanhSachPhong();
            LoadDanhSachThongKeTinhTrangPhong();
            LoadLoaiPhongVaocbb();
            LoadDanhSachDatPhongVaocbb();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Data.SqlClient;
using QuanLiKhachSan.DAO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Main shell window: hosts every feature form inside a TabControl
    /// (one owner-drawn tab per form, with a close glyph), and hides the
    /// Admin menu for non-admin accounts (role id != "CV0002").
    /// </summary>
    public partial class fManager : Form
    {
        // Close-glyph bitmaps for inactive and active (selected) tabs.
        static Image closeImage, closeImageAct;
        public fManager()
        {
            InitializeComponent();
            // Open the booking screen by default.
            AddTabPages(fDatPhong);
        }
        private static fManager instances;
        // NOTE(review): near-duplicate of tabHienThi_DrawItem below, except the
        // selected branch never draws the tab text — verify whether this handler
        // is actually wired to a control or is dead code.
        private void tabHienThitiep_DrawItem(object sender, DrawItemEventArgs e )
        {
            Rectangle rect = tabHienThi.GetTabRect(e.Index);
            Rectangle imageRec = new Rectangle(rect.Right - closeImage.Width, rect.Top + (rect.Height - closeImage.Height) / 2,
                closeImage.Width, closeImage.Height);
            rect.Size = new Size(rect.Width + 24, 38);
            //chon tab
            Font f;
            Brush br = Brushes.Black;
            StringFormat strF = new StringFormat(StringFormat.GenericDefault);
            if (tabHienThi.SelectedTab == tabHienThi.TabPages[e.Index])
            {
                f = new Font("Arial", 10, FontStyle.Underline);
            }
            else
            {
                e.Graphics.DrawImage(closeImage, imageRec);
                f = new Font("Arial", 10, FontStyle.Regular);
                e.Graphics.DrawString(tabHienThi.TabPages[e.Index].Text, f, br, rect, strF);
            }
        }
        /// <summary>
        /// Shows "frm" in a tab: selects its existing tab when the form is
        /// already open, otherwise adds a new tab page hosting the form.
        /// Tabs are matched by the form's Text.
        /// </summary>
        public void AddTabPages(Form frm)
        {
            int t = KTFormTonTai(frm);
            if (t >= 0)
            {
                if (tabHienThi.SelectedTab == tabHienThi.TabPages[t])
                {
                    MessageBox.Show("form da duoc chon");
                }
                else {
                    tabHienThi.SelectedTab = tabHienThi.TabPages[t];
                };
            }
            else //them tab moi
            {
                TabPage newTab = new TabPage(frm.Text.Trim());
                tabHienThi.TabPages.Add(newTab);
                frm.TopLevel = false; // required to embed a Form as a child control
                TabPage a = tabHienThi.SelectedTab;
                frm.Parent = newTab;
                tabHienThi.SelectedTab = tabHienThi.TabPages[tabHienThi.TabCount - 1];
                frm.Show();
                frm.Dock = DockStyle.Fill;
            }
        }
        // Returns the tab index hosting the form (matched by title), or -1.
        private int KTFormTonTai(Form frm)
        {
            for(int i=0;i<tabHienThi.TabCount;i++)
            {
                if (tabHienThi.TabPages[i].Text == frm.Text.Trim())
                    return i;
            }
            return -1;
        }
        private static fDanhSachNhanVien fDanhSachNhanVien=new fDanhSachNhanVien();
        private void theeToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddTabPages(fDanhSachNhanVien);
        }
        private void menuStrip1_ItemClicked(object sender, ToolStripItemClickedEventArgs e)
        {
        }
        /// <summary>
        /// Owner-draws each tab: selected tab gets underlined text plus the
        /// "active" close glyph; others get regular text and the normal glyph.
        /// NOTE(review): the "e=null" default is meaningless for an event
        /// handler and would NRE if ever called with null.
        /// </summary>
        private void tabHienThi_DrawItem(object sender, DrawItemEventArgs e=null)
        {
            Rectangle rect = tabHienThi.GetTabRect(e.Index);
            Rectangle imageRec = new Rectangle(rect.Right-closeImage.Width,rect.Top+(rect.Height-closeImage.Height)/2,
                closeImage.Width,closeImage.Height);
            rect.Size = new Size(rect.Width + 24, 38);
            //chon tab
            Font f;
            Brush br = Brushes.Black;
            StringFormat strF = new StringFormat(StringFormat.GenericDefault);
            if(tabHienThi.SelectedTab==tabHienThi.TabPages[e.Index])
            {
                e.Graphics.DrawImage(closeImageAct, imageRec);
                f = new Font("Arial", 10, FontStyle.Underline);
                e.Graphics.DrawString(tabHienThi.TabPages[e.Index].Text, f, br, rect,strF);
            }
            else
            {
                e.Graphics.DrawImage(closeImage, imageRec);
                f = new Font("Arial", 10, FontStyle.Regular);
                e.Graphics.DrawString(tabHienThi.TabPages[e.Index].Text, f, br, rect, strF);
            }
        }
        // Role id (MACHUCVU) of the logged-in account; set by tao().
        String macv;
        public static fManager Instances { get {
                if (instances == null) instances = new fManager(); return instances;
            } set => instances = value; }
        // NOTE(review): unused property — candidate for removal.
        public object a { get; private set; }
        /// <summary>Looks up the logged-in account's role id from TAIKHOAN.
        /// NOTE(review): builds SQL by concatenating fLogin.TaiKhoan —
        /// SQL-injection risk; should use a bound parameter.</summary>
        public void tao()
        {
            string query = "SELECT MACHUCVU FROM TAIKHOAN WHERE TENTAIKHOAN='" + fLogin.TaiKhoan + "'";
            DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
            macv = dt.Rows[0]["MACHUCVU"].ToString().Trim();
        }
        /// <summary>Startup: resolves the user's role (hiding the admin menu
        /// for non-admins) and prepares the scaled close-glyph bitmaps.</summary>
        private void fManager_Load(object sender, EventArgs e)
        {
            tao();
            if (macv != "CV0002")
            {
                adminToolStripMenuItem.Visible = false;
            }
            Size mysize = new System.Drawing.Size(10, 17);
            Bitmap bt = new Bitmap(Properties.Resources.close);
            Bitmap btm = new Bitmap(bt, mysize);
            closeImageAct = btm;
            //
            Bitmap bt2 = new Bitmap(Properties.Resources.closeBlack);
            Bitmap btm2 = new Bitmap(bt2, mysize);
            closeImage = btm2;
            // Extra padding so the close glyph fits inside the tab header.
            tabHienThi.Padding = new Point(30);
        }
        // One cached instance per feature form; reused across tab opens.
        private static fThongTinTaiKhoan fThongTinTaiKhoan = new fThongTinTaiKhoan();
        private void thôngTinNhânViênToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddTabPages(fThongTinTaiKhoan);
        }
        private static fSoDoPhong fSoDoPhong = new fSoDoPhong();
        private static fDatPhong fDatPhong=new fDatPhong();
        private void btnSoDoPhong_Click(object sender, EventArgs e)
        {
            AddTabPages(fSoDoPhong);
        }
        private static fCongNoTraPhong fCongNoTraPhong = new fCongNoTraPhong();
        private void btnCongNo_Click(object sender, EventArgs e)
        {
            AddTabPages(fCongNoTraPhong);
        }
        private static fHoaDonDichVu fHoaDonDichVu = new fHoaDonDichVu();
        private void btnHoaDonDichVu_Click(object sender, EventArgs e)
        {
            AddTabPages(fHoaDonDichVu);
        }
        private static fThongTinKhach fThongTinKhach = new fThongTinKhach();
        private void btnThôngTinKhach_Click(object sender, EventArgs e)
        {
            AddTabPages(fThongTinKhach);
        }
        private static fBaoCaoHoaDon fBaoCaoHoaDon = new fBaoCaoHoaDon();
        private void btnBaoCaoHoaDon_Click(object sender, EventArgs e)
        {
            AddTabPages(fBaoCaoHoaDon);
        }
        private fLichLamViec fLichLamViec = new fLichLamViec();
        private void btnLichLamViec_Click(object sender, EventArgs e)
        {
            AddTabPages(fLichLamViec);
        }
        private void lichLamViêcToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddTabPages(fPhanCongLichLamViec);
        }
        private void thoatToolStripMenuItem1_Click(object sender, EventArgs e)
        {
            this.Close();
        }
        private void thoatToolStripMenuItem_Click(object sender, EventArgs e)
        {
            this.Close();
        }
        private static fPhanChucVu fPhanChucVu = new fPhanChucVu();
        private void phânChưcVuToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddTabPages(fPhanChucVu);
        }
        private static fPhanCongLichLamViec fPhanCongLichLamViec = new fPhanCongLichLamViec();
        private void phânLichLamViêcToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddTabPages(fPhanCongLichLamViec);
        }
        /// <summary>
        /// Click inside the tab strip: if the click landed on any tab's close
        /// glyph, removes the tab. NOTE(review): it always removes SelectedTab,
        /// even when the glyph hit belongs to a different tab — confirm intent.
        /// </summary>
        private void tabHienThi_MouseClick(object sender, MouseEventArgs e)
        {
            for (int i = 0; i < tabHienThi.TabCount; i++)
            {
                Rectangle rect = tabHienThi.GetTabRect(i);
                Rectangle imageRec = new Rectangle(rect.Right - closeImage.Width, rect.Top + (rect.Height - closeImage.Height) / 2,
                    closeImage.Width, closeImage.Height);
                if (imageRec.Contains(e.Location))
                    tabHienThi.TabPages.Remove(tabHienThi.SelectedTab);
            }
        }
        private void adminToolStripMenuItem_Click(object sender, EventArgs e)
        {
        }
        /// <summary>Tab switch: refreshes every cached feature form.
        /// NOTE(review): reloads all forms on every switch, not just the
        /// visible one — wasteful but behavior-relevant; confirm before changing.</summary>
        private void tabHienThi_SelectedIndexChanged(object sender, EventArgs e)
        {
            fSoDoPhong.fSoDoPhong_Load(fSoDoPhong, e);
            fDatPhong.fDatPhong_Load(fDatPhong, e);
            fCongNoTraPhong.LoadCongNo();
            fDatDichVu.fDatDichVu_Load(fDatDichVu, e);
            fHoaDonDichVu.fHoaDonDichVu_Load(fHoaDonDichVu, e);
            fLichLamViec.hienthi();
            fDanhSachNhanVien.fDanhSachNhanVien_Load(fDanhSachNhanVien,e);
            fPhanCongLichLamViec.fPhanCongLichLamViec_Load(fPhanCongLichLamViec, e);
            fThongTinKhach.fThongTinKhach_Load(fThongTinKhach, e);
        }
        private static fDatDichVu fDatDichVu = new fDatDichVu();
        private void btnDatDichVu_Click(object sender, EventArgs e)
        {
            AddTabPages(fDatDichVu);
        }
        private void btnDatPhong_Click(object sender, EventArgs e)
        {
            AddTabPages(fDatPhong);
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using QuanLiKhachSan.DTO;
using System.Data;
namespace QuanLiKhachSan.DAO
{
public class LichLamViecDAO
{
private static LichLamViecDAO instances;
public LichLamViecDAO() { }
public static LichLamViecDAO Instances
{
get
{
if (instances == null) instances = new LichLamViecDAO(); return instances;
}
set => instances = value;
}
public bool ThemLichLamViec(EC_LichLamViec EC)
{
string query = " execute Them_LichlamViec @NGAYLAMVIEC , @BUOI , @MANHANVIEN";
DataProvider.Instance.ExecuteQuery(query, new object[] { EC.NgayLamViec,EC.Buoi,EC.MaNhanVien });
return true;
}
public bool SuaLichLamViec(EC_LichLamViec EC)
{
string query = " execute Sua_LichlamViec @MaLLV , @NGAYLAMVIEC , @BUOI , @MANHANVIEN";
DataProvider.Instance.ExecuteQuery(query, new object[] {EC.MaLichLamViec ,EC.NgayLamViec, EC.Buoi, EC.MaNhanVien });
return true;
}
public bool XoaLichLamViec(EC_LichLamViec EC)
{
string query = " execute Xoa_LichlamViec @ma";
DataProvider.Instance.ExecuteQuery(query, new object[] { EC.MaLichLamViec });
return true;
}
public DataTable Taobang2(string dieukien)
{
string query = " SELECT LICHLAMVIEC.NGAYLAMVIEC, LICHLAMVIEC.BUOI, NHANVIEN.TENNHANVIEN FROM NHANVIEN INNER JOIN TAIKHOAN ON NHANVIEN.MANHANVIEN = TAIKHOAN.MANHANVIEN INNER JOIN LICHLAMVIEC ON NHANVIEN.MANHANVIEN = LICHLAMVIEC.MANHANVIEN WHERE TAIKHOAN.TENTAIKHOAN='" + dieukien + "'";
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
return dt;
}
public DataTable Taobang(string dieukien)
{
string query = " SELECT LICHLAMVIEC.NGAYLAMVIEC, LICHLAMVIEC.BUOI, LICHLAMVIEC.MALICHLAMVIEC , NHANVIEN.TENNHANVIEN FROM LICHLAMVIEC INNER JOIN NHANVIEN ON LICHLAMVIEC.MANHANVIEN = NHANVIEN.MANHANVIEN" + dieukien ;
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
return dt;
}
}
}
<file_sep>using QuanLiKhachSan.DTO;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
    /// <summary>
    /// Data access for bookings (DATPHONG). Thin wrappers around stored
    /// procedures; DataProvider binds the @-tokens in each query text
    /// positionally from the args array, so the exact SQL strings matter.
    /// Exposed as a lazily created singleton.
    /// </summary>
    public class DatPhongDAO
    {
        private static DatPhongDAO instances;
        public static DatPhongDAO Instances { get {
                if (instances == null) instances = new DatPhongDAO(); return instances;
            }set => instances = value; }
        /// <summary>All booking rows.</summary>
        public List<DatPhong> HienThiDanhSachDatPhong()
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "execute DanhSachDatPhong";
            DataTable data = DataProvider.Instance.ExecuteQuery(query);
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Booking rows not yet paid.</summary>
        public List<DatPhong> HienThiDanhSachDatPhongChuaThanhToan()
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "execute DanhSachDatPhongChuaThanhToan";
            DataTable data = DataProvider.Instance.ExecuteQuery(query);
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Booking list shaped for the combo box (uses the 2-arg
        /// DatPhong(row, 2) constructor).</summary>
        public List<DatPhong> HienThiDanhSachDatPhongVaocbb()
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "execute DanhSachDatPhongVaocbb";
            DataTable data = DataProvider.Instance.ExecuteQuery(query);
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item, 2);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Rows of one booking, by booking id.</summary>
        public List<DatPhong> HienThiDanhSachDatPhong(string MaDP)
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "EXECUTE TimDatPhongTHeoMa @madp";
            DataTable data = DataProvider.Instance.ExecuteQuery(query,new object[] {MaDP });
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Changes the service (and its quantity) attached to one
        /// room of one booking.</summary>
        public void SuaDichVuVoiMaDatPhongVaMaPhong(string maDatPhong,string maPhong,string maDV,int soLuong)
        {
            string query = "execute SuaDichVu @maDatPhong , @MaPhong , @maDV , @soluong";
            DataProvider.Instance.ExecuteQuery(query,new object[] {maDatPhong,maPhong,maDV,soLuong });
        }
        /// <summary>Bookings whose rooms are currently held (reserved).</summary>
        public List<DatPhong> HienThiDanhSachDatPhongCoCacPhongDuocGiu()
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "DanhSachDatPhongCoCacPhongDangDuocGiu";
            DataTable data = DataProvider.Instance.ExecuteQuery(query);
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Deletes every row of one booking.</summary>
        public void XoaToanBoDatPhongTheoMaDatPhong(string maDatPhong)
        {
            string query = "execute XoaToanBoDatPhongTheoMaDatPhong @madatphong";
            // Result table is ignored; the call is executed for its side effect.
            DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] { maDatPhong});
        }
        /// <summary>Removes one service from one room of one booking.</summary>
        public void XoaDichVuTheoMaPhongVaMaDatPhong(string maDatPhong,string maPhong, string maDichVu)
        {
            string query = "execute XoaDichVuTheoMaPhongVaMaDatPhong @madatphong , @maphong , @madichvu";
            DataProvider.Instance.ExecuteQuery(query, new object[] {maDatPhong,maPhong,maDichVu });
        }
        /// <summary>Rows of one booking restricted to one room.</summary>
        public List<DatPhong> HienThiDanhSachDatPhongMaPhongTuongUng(string maDatPhong,string maPhong)
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "HienThiDanhSachDatPhongMaPhongTuongUng @madatphong , @maphong";
            DataTable data = DataProvider.Instance.ExecuteQuery(query,new object[] {maDatPhong,maPhong });
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>Bookings whose rooms are currently occupied.</summary>
        public List<DatPhong> HienThiDanhSachDatPhongCoCacPhongDangO()
        {
            List<DatPhong> danhsachdatphong = new List<DatPhong>();
            string query = "DanhSachDatPhongCoCacPhongDangO";
            DataTable data = DataProvider.Instance.ExecuteQuery(query);
            foreach (DataRow item in data.Rows)
            {
                DatPhong datPhong = new DatPhong(item);
                danhsachdatphong.Add(datPhong);
            }
            return danhsachdatphong;
        }
        /// <summary>True when the room appears in any booking row.</summary>
        public bool KiemTraPhongCoTonTaiTrongMaDP(string maPhong)
        {
            string query = "execute TimPhongCoTrongDatPhong @maPhong";
            DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] {maPhong });
            return data.Rows.Count > 0;
        }
        /// <summary>Deletes all booking rows referencing the room.</summary>
        public void removeDatPhongTheoMaPhong(string maPhong)
        {
            string query = "execute XoaDatPhongTheoMaPhong @maPhong";
            DataProvider.Instance.ExecuteQuery(query,new object[] {maPhong });
        }
        /// <summary>Deletes one room from one booking.</summary>
        public void removeDatPhongTheoMaPhongVaMaDatPhong(string maDatPhong,string maPhong)
        {
            string query = "execute XoaPhongTheoMaDPTuongUng @madatphong , @maphong";
            DataProvider.Instance.ExecuteQuery(query, new object[] { maDatPhong,maPhong });
        }
        /// <summary>Creates a new booking for an immediate stay ("ở liền":
        /// check-in today, departure possibly unknown).</summary>
        public void ThemDatPhongOLien(string maPhong,string maKH,float traTruoc,DateTime ngayO,DateTime ngayDi)
        {
            string query =" execute ThemDatPhongOLien @MAPHONG , @MAKH , @TRATRUOC , @NGAYO , @NGAYDI ";
            DataProvider.Instance.ExecuteQuery(query, new object[] { maPhong, maKH, traTruoc, ngayO, ngayDi});
        }
        /// <summary>Adds another immediate-stay room (with an optional service)
        /// to an existing booking.</summary>
        public void ThemPhongOLienVaoDatPhong(string maDatPhong,string maPhong, string maKH, float traTruoc, DateTime ngayO, DateTime ngayDi, string MaDV, int SoLuong)
        {
            string query = " execute THEMPHONGVAOMADP @MADATPHONG , @MAPHONG , @MAKH , @TRATRUOC , @NGAYO , @NGAYDI , @MADV , @SOLUONG ";
            DataProvider.Instance.ExecuteQuery(query, new object[] {maDatPhong, maPhong, maKH, traTruoc, ngayO, ngayDi, MaDV, SoLuong });
        }
        /// <summary>Creates a new booking that holds a room for a future stay.</summary>
        public void ThemDatPhongVaGiuPhong(string maPhong, string maKH, float traTruoc, DateTime ngayO, DateTime ngayDi)
        {
            string query = " execute ThemDatPhongGiuPhong @MAPHONG , @MAKH , @TRATRUOC , @NGAYO , @NGAYDI ";
            DataProvider.Instance.ExecuteQuery(query, new object[] { maPhong, maKH, traTruoc, ngayO, ngayDi});
        }
        /// <summary>Adds another held (future-stay) room, with an optional
        /// service, to an existing booking.</summary>
        public void ThemPhongVaGiuPhongVaoDatPhong(string maDatPhong, string maPhong, string maKH, float traTruoc, DateTime ngayO, DateTime ngayDi, string MaDV, int SoLuong)
        {
            string query = " execute ThemPhongVaGiuPhongVaoDatPhong @MADATPHONG , @MAPHONG , @MAKH , @TRATRUOC , @NGAYO , @NGAYDI , @MADV , @SOLUONG ";
            DataProvider.Instance.ExecuteQuery(query, new object[] { maDatPhong, maPhong, maKH, traTruoc, ngayO, ngayDi, MaDV, SoLuong });
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
public class DichVu
{
private string maDV;
private string tenDV;
private float giaDV;
private object item;
public string MaDV { get => maDV; set => maDV = value; }
public string TenDV { get => tenDV; set => tenDV = value; }
public float GiaDV { get => giaDV; set => giaDV = value; }
public DichVu(string maDV,string tenDV,float giaDV)
{
MaDV = maDV;
TenDV = tenDV;
GiaDV = giaDV;
}
public DichVu(DataRow row)
{
MaDV = row["MaDV"].ToString();
TenDV = row["TenDV"].ToString();
GiaDV = float.Parse(row["GiaDV"].ToString());
}
public DichVu(object item)
{
this.item = item;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
public class DatDichVu
{
private string maDatPhong;
private string maDichVu;
private int soLuong;
private DateTime ngayDung;
private float giaTien;
private object item;
public DatDichVu(string maDatPhong,string maDichVu,int soLuong,DateTime ngayDung,float giaTien)
{
MaDatPhong = maDatPhong;
TenDV = maDichVu;
SoLuong = soLuong;
NgayDung = ngayDung;
GiaTien = giaTien;
}
public DatDichVu(DataRow row)
{
MaDatPhong = row["madp"].ToString();
TenDV = row["TENDV"].ToString();
SoLuong = int.Parse(row["SoLuong"].ToString());
NgayDung = DateTime.Parse(row["ngayDung"].ToString());
GiaTien = float.Parse(row["giadichvuhientai"].ToString());
}
public string MaDatPhong { get => maDatPhong; set => maDatPhong = value; }
public string TenDV { get => maDichVu; set => maDichVu = value; }
public int SoLuong { get => soLuong; set => soLuong = value; }
public DateTime NgayDung { get => ngayDung; set => ngayDung = value; }
public float GiaTien { get => giaTien; set => giaTien = value; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Data.SqlClient;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
public partial class fPhanCongLichLamViec : Form
{
List<string> listManv = new List<string>();
EC_LichLamViec ec = new EC_LichLamViec();
public fPhanCongLichLamViec()
{
InitializeComponent();
}
public void hienthi(string dieukien)
{
daLichlamviec.DataSource = LichLamViecDAO.Instances.Taobang(dieukien);
}
public void setnull()
{
txtMalichlamviec.Text = "";
cbMaNV.Text = "";
cbBuoi.Text = "";
}
private void btnThem_Click(object sender, EventArgs e)
{
txtMalichlamviec.Text = "";
List<EC_LichLamViec> list = new List<EC_LichLamViec>();
string query = "SELECT NGAYLAMVIEC, BUOI FROM LICHLAMVIEC WHERE MANHANVIEN = '"+cbMaNV.Text+"'";
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
for (int j = 0; j < dt.Rows.Count; j++)
{
ec.NgayLamViec = Convert.ToDateTime(dt.Rows[j]["NGAYLAMVIEC"].ToString().Trim());
ec.Buoi = dt.Rows[j]["BUOI"].ToString().Trim();
list.Add(ec);
}
EC_LichLamViec[] lamViec;
lamViec = list.ToArray();
if (cbMaNV.Text == "" || cbBuoi.Text ==""||dtNgaylamviec.Text=="")
{
MessageBox.Show("Nhập đầy đủ thông tin!!!");
return;
}
else
{
foreach (EC_LichLamViec item in lamViec)
{
if (item.NgayLamViec == dtNgaylamviec.Value && item.Buoi == cbBuoi.Text)
{
MessageBox.Show("Ngày này đã có buổi tương ứng!");
return;
}
}
try
{
ec.MaNhanVien = cbMaNV.Text;
ec.Buoi = cbBuoi.Text;
ec.NgayLamViec = dtNgaylamviec.Value;
LichLamViecDAO.Instances.ThemLichLamViec(ec);
MessageBox.Show("thực hiện thành công!!!");
}
catch
{
MessageBox.Show("lỗi!!!");
return;
}
setnull();
hienthi("");
}
}
private void btnSua_Click(object sender, EventArgs e)
{
List<EC_LichLamViec> list = new List<EC_LichLamViec>();
string query = "SELECT NGAYLAMVIEC, BUOI FROM LICHLAMVIEC WHERE MANHANVIEN = '" + cbMaNV.Text + "'";
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
for (int j = 0; j < dt.Rows.Count; j++)
{
ec.NgayLamViec = Convert.ToDateTime(dt.Rows[j]["NGAYLAMVIEC"].ToString().Trim());
ec.Buoi = dt.Rows[j]["BUOI"].ToString().Trim();
list.Add(ec);
}
EC_LichLamViec[] lamViec2;
lamViec2 = list.ToArray();
if (cbMaNV.Text == "")
{
MessageBox.Show("Chọn mã nhân viên!!!");
return;
}
else
{
try
{
foreach (EC_LichLamViec item in lamViec2)
{
if (item.NgayLamViec == dtNgaylamviec.Value && item.Buoi == cbBuoi.Text)
{
MessageBox.Show("Ngày này đã có buổi tương ứng!");
return;
}
}
ec.MaLichLamViec = txtMalichlamviec.Text;
ec.MaNhanVien = cbMaNV.Text;
ec.Buoi = cbBuoi.Text;
ec.NgayLamViec = dtNgaylamviec.Value;
LichLamViecDAO.Instances.SuaLichLamViec(ec);
MessageBox.Show("thực hiện thành công!!!");
}
catch
{
MessageBox.Show("lỗi!!!");
return;
}
setnull();
hienthi("");
}
}
private void btnXoa_Click(object sender, EventArgs e)
{
if(txtMalichlamviec.Text == "")
{
MessageBox.Show("Nhập hoặc chọn mã nhân viên!!!");
return;
}
else
{
try
{
ec.MaLichLamViec = txtMalichlamviec.Text;
LichLamViecDAO.Instances.XoaLichLamViec(ec);
MessageBox.Show("Thực hiện thành công!!!");
}
catch
{
MessageBox.Show("Lỗi!!!");
return;
}
setnull();
hienthi("");
}
}
private void daLichlamviec_RowHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e)
{
txtMalichlamviec.Text = daLichlamviec.SelectedRows[0].Cells[2].Value.ToString();
//string s= daLichlamviec.SelectedRows[0].Cells[3].Value.ToString();
cbBuoi.Text = daLichlamviec.SelectedRows[0].Cells[1].Value.ToString();
dtNgaylamviec.Text = daLichlamviec.SelectedRows[0].Cells[0].Value.ToString();
string query = "select MANHANVIEN FROM LICHLAMVIEC WHERE MALICHLAMVIEC='" + daLichlamviec.SelectedRows[0].Cells[2].Value.ToString() + "'";
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
txtMalichlamviec.Enabled = false;
cbMaNV.Text = dt.Rows[0]["MANHANVIEN"].ToString().Trim();
}
public void fPhanCongLichLamViec_Load(object sender, EventArgs e)
{
hienthi("");
txtMalichlamviec.Enabled = false;
string query = "SELECT MANHANVIEN FROM NHANVIEN";
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
for(int i = 0; i < dt.Rows.Count; i++)
{
cbMaNV.Items.Add(dt.Rows[i]["MANHANVIEN"].ToString().Trim());
}
}
private void cbMaNV_KeyPress(object sender, KeyPressEventArgs e)
{
e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
}
private void txtBuoi_KeyPress(object sender, KeyPressEventArgs e)
{
e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
public partial class fPhanChucVu : Form
{
EC_CHUCVU ec = new EC_CHUCVU();
public fPhanChucVu()
{
InitializeComponent();
}
void setnull()
{
txtMacv.Text = "";
txtTencv.Text = "";
}
private void hienthi(string dk)
{
dgChucvu.DataSource = ChucVuDAO.Instances.Taobang(dk);
}
private void btnThem_Click(object sender, EventArgs e)
{
txtMacv.Text = "";
if (txtTencv.Text == "")
{
MessageBox.Show("Nhập đày đủ thông tin!!!");
return;
}
else
{
try
{
ec.TenChucVu = txtTencv.Text;
ChucVuDAO.Instances.ThemChucVu(ec);
MessageBox.Show("Thực hiện thành công!!!");
}
catch
{
MessageBox.Show("Lỗi!!!");
return;
}
}
setnull();
hienthi("");
}
private void btnSua_Click(object sender, EventArgs e)
{
if (txtTencv.Text == "")
{
MessageBox.Show("Chọn hàng cần sửa!!!");
return;
}
else
{
try
{
ec.MaChucVu = txtMacv.Text;
ec.TenChucVu = txtTencv.Text;
ChucVuDAO.Instances.SuaChucVu(ec);
MessageBox.Show("Thực hiện thành công!!!");
}
catch
{
MessageBox.Show("Lỗi!!!");
return;
}
}
setnull();
hienthi("");
}
private void btnXoa_Click(object sender, EventArgs e)
{
if (txtMacv.Text == "")
{
MessageBox.Show("Chọn hàng cần xóa!!!");
return;
}
else
{
try
{
ec.MaChucVu = txtMacv.Text;
ChucVuDAO.Instances.XoaChucVu(ec);
MessageBox.Show("Thực hiện thành công!!!");
}
catch
{
MessageBox.Show("Lỗi!!!");
return;
}
}
setnull();
hienthi("");
}
private void dgChucvu_MouseClick(object sender, MouseEventArgs e)
{
txtMacv.Text = dgChucvu.SelectedRows[0].Cells[0].Value.ToString();
txtTencv.Text = dgChucvu.SelectedRows[0].Cells[1].Value.ToString();
}
private void fPhanChucVu_Load(object sender, EventArgs e)
{
setnull();
hienthi("");
txtMacv.Enabled = false;
}
private void txtTencv_KeyPress(object sender, KeyPressEventArgs e)
{
e.Handled = e.KeyChar != (char)Keys.Back && !char.IsSeparator(e.KeyChar) && !char.IsLetter(e.KeyChar) && !char.IsDigit(e.KeyChar);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
namespace QuanLiKhachSan
{
    /// <summary>
    /// Room-map form: draws one color-coded button per room, shows room
    /// details, and lets the user add/update/remove rooms.
    /// Color scheme used throughout (derived from the button texts below):
    /// TinhTrangPhong 1 = free (white), 2 = booked for the future (red),
    /// 3 = booked/occupied now (gray).
    /// </summary>
    public partial class fSoDoPhong : Form
    {
        public fSoDoPhong()
        {
            InitializeComponent();
        }
        // Fills cbbPhong with the rooms of the given category and mirrors the
        // auto-selected first room's price/currency into the text boxes.
        // NOTE(review): SelectedItem is read immediately after assigning
        // DataSource — this relies on WinForms auto-selecting the first item
        // and throws if the category has no rooms; confirm that cannot happen.
        private void LoadTenPhongTheoLoaiPhong(string maLoaiPhong)
        {
            List<Phong> danhsachphongtheoma = PhongDAO.Instances.LoadDanhSachTheoMaLoaiPhong(maLoaiPhong);
            cbbPhong.DataSource = danhsachphongtheoma;
            txtGiaPhong.Text = (cbbPhong.SelectedItem as Phong).GiaPhong.ToString();
            txtDonViTien.Text = (cbbPhong.SelectedItem as Phong).DonViTienTe;
            cbbPhong.DisplayMember = "MA";
        }
        // Loads all room categories into both category combo boxes
        // (the "browse" one and the "choose category for edit" one).
        private void LoadLoaiPhongVaocbb()
        {
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            cbbLoaiPhong.DataSource = danhsachloaiphong;
            cbbLoaiPhong.DisplayMember = "TENLOAIPHONG";
            cbbChonLoaiPhongDeSua.DataSource = danhsachloaiphong;
            cbbChonLoaiPhongDeSua.DisplayMember = "TENLOAIPHONG";
        }
        // Reloads the category combo box and pre-selects index a.
        private void LoadLoaiPhongVaocbbLoaiPhong(int a)
        {
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            cbbLoaiPhong.DataSource = danhsachloaiphong;
            cbbLoaiPhong.SelectedIndex = a;
            cbbLoaiPhong.DisplayMember = "TENLOAIPHONG";
        }
        // Rebuilds the summary panel: one colored button per occupancy state
        // with the count of rooms in that state.
        private void LoadDanhSachThongKeTinhTrangPhong()
        {
            flpThongKe.Controls.Clear();
            List<TableHienThiThongKeTinhTrangPhong> DanhSachThongKe = PhongDAO.Instances.LoadDanhSachThongKePhong();
            foreach (var item in DanhSachThongKe)
            {
                Button hienthithongke = new Button() { Height=70,Width=90};
                flpThongKe.Controls.Add(hienthithongke);
                if (item.TinhTrangPhong == 1)
                {
                    hienthithongke.Text = "Phòng Trống" + " Số Lượng "+item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.White;
                }
                if (item.TinhTrangPhong == 2)
                {
                    hienthithongke.Text = "Phòng được đặt ở tương lai \n Số Lượng:" + item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.Red;
                }
                if (item.TinhTrangPhong == 3)
                {
                    hienthithongke.Text = "Phòng Có Người Đặt \n Hoặc Đang Ở Số Lượng:" + item.SoLuong.ToString();
                    hienthithongke.BackColor = Color.LightSlateGray;
                }
            }
        }
        // Rebuilds the room map: one button per room, tagged with its Phong
        // object and color-coded by state. Occupied rooms (state 3) also get a
        // double-click handler that opens the billing form.
        private void LoadDanhSach()
        {
            flpDanhSachCacPhong.Controls.Clear();
            List<Phong> danhsachPhong = PhongDAO.Instances.LoadDanhSach();
            foreach (var item in danhsachPhong)
            {
                Button nut = new Button() { Width = 100, Height = 100 };
                flpDanhSachCacPhong.AutoScroll = true;
                flpDanhSachCacPhong.Controls.Add(nut);
                nut.Tag = item;
                nut.Click += btnclick;
                nut.Text = item.MA + "\n" + item.TenPhong;
                if (item.TinhTrangPhong == 1)
                {
                    nut.BackColor = Color.White;
                }
                if (item.TinhTrangPhong == 2)
                {
                    nut.BackColor = Color.Red;
                }
                if (item.TinhTrangPhong == 3)
                {
                    nut.BackColor = Color.LightSlateGray;
                    nut.MouseDown += Nut_MouseDown;
                }
            }
        }
        // Double click on an occupied room: open the invoice form for it and
        // refresh the map afterwards (the state may have changed).
        private void Nut_MouseDown(object sender, MouseEventArgs e)
        {
            if (e.Clicks==2)
            {
                fBaoCaoHoaDon fm = new fBaoCaoHoaDon();
                fm.MaPH = ((sender as Button).Tag as Phong).MA;
                fm.ShowDialog();
                LoadDanhSach();
            }
        }
        // Last clicked room id and its category id (used by btnRemove_Click).
        string maphong, maloaiphong;
        // Room-button click: show the room's details and synchronize both combo
        // boxes to the clicked room.
        private void btnclick(object sender, EventArgs e)
        {
            maphong = ((sender as Button).Tag as Phong).MA;
            maloaiphong = ((sender as Button).Tag as Phong).MaLoaiPhong;
            LoadThongTinPhong(maphong);
            listVThongTinPhong.Tag = (sender as Button).Tag;
            int a = TimKiemLoaiPhongVuaChon(maphong, maloaiphong);
            int b = TimKiemPhongVuaChon(maphong, maloaiphong);
            LoadLoaiPhongVaocbbLoaiPhong(a);
            LoadPhongVaocbbPhong(b, maloaiphong);
        }
        // Fills cbbPhong with the rooms of the category and selects index b;
        // price/currency boxes show the clicked room's values.
        private void LoadPhongVaocbbPhong(int b,string maloaiphong)
        {
            List<Phong> danhsachphongtheoloaiphong = PhongDAO.Instances.LoadDanhSachTheoMaLoaiPhong(maloaiphong);
            List<Phong> danhsachphongtheomaphong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            txtGiaPhong.Text = danhsachphongtheomaphong[0].GiaPhong.ToString();
            txtDonViTien.Text = danhsachphongtheomaphong[0].DonViTienTe;
            cbbPhong.DataSource = danhsachphongtheoloaiphong;
            cbbPhong.SelectedIndex = b;
            cbbPhong.DisplayMember = "MA";
        }
        // Returns the index of the clicked room's category within the full
        // category list (0 if not found).
        private int TimKiemLoaiPhongVuaChon(string maphong,string maloaiphong)
        {
            int dem = 0;
            List<LoaiPhong> danhsachloaiphong = LoaiPhongDAO.Instances.DanhSachLoaiPhong();
            List<Phong> phong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            for (int i = 0; i < danhsachloaiphong.Count; i++)
            {
                if (danhsachloaiphong[i].MA == phong[0].MaLoaiPhong)
                {
                    dem = i;
                }
            }
            return dem;
        }
        // Returns the index of the clicked room within its category's room list
        // (0 if not found).
        private int TimKiemPhongVuaChon(string maphong, string maloaiphong)
        {
            int dem = 0;
            List<Phong> danhsachphong = PhongDAO.Instances.LoadDanhSachTheoMaLoaiPhong(maloaiphong);
            List<Phong> phong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(maphong);
            for (int i = 0; i < danhsachphong.Count; i++)
            {
                if (danhsachphong[i].MA == phong[0].MA)
                {
                    dem = i;
                }
            }
            return dem;
        }
        // Shows the detail rows for a single room in the list view.
        private void LoadThongTinPhong(string ma)
        {
            listVThongTinPhong.Items.Clear();
            List<Phong> hienthithongtinphong = PhongDAO.Instances.LoadDanhSachTheoMaPhong(ma);
            foreach (var item in hienthithongtinphong)
            {
                ListViewItem listView = new ListViewItem(item.MA);
                listView.SubItems.Add(item.TenPhong);
                listView.SubItems.Add(item.TinhTrangPhong.ToString());
                listView.SubItems.Add(item.MaLoaiPhong);
                listView.SubItems.Add(item.GiaPhong.ToString());
                listView.SubItems.Add(item.DonViTienTe);
                listVThongTinPhong.Items.Add(listView);
            }
        }
        // Overload without a filter: shows every room in the list view.
        private void LoadThongTinPhong()
        {
            listVThongTinPhong.Items.Clear();
            List<Phong> hienthithongtinphong = PhongDAO.Instances.LoadDanhSach();
            foreach (var item in hienthithongtinphong)
            {
                ListViewItem listView = new ListViewItem(item.MA);
                listView.SubItems.Add(item.TenPhong);
                listView.SubItems.Add(item.TinhTrangPhong.ToString());
                listView.SubItems.Add(item.MaLoaiPhong);
                listView.SubItems.Add(item.GiaPhong.ToString());
                listView.SubItems.Add(item.DonViTienTe);
                listVThongTinPhong.Items.Add(listView);
            }
        }
        // Category changed: repopulate the room combo box for that category.
        private void cbbLoaiPhong_SelectedIndexChanged(object sender, EventArgs e)
        {
            string maLoaiPhong;
            ComboBox cb = sender as ComboBox;
            if (cb.SelectedItem == null) return;
            LoaiPhong loaiPhong = cb.SelectedItem as LoaiPhong;
            maLoaiPhong = loaiPhong.MA;
            LoadTenPhongTheoLoaiPhong(maLoaiPhong);
        }
        // Inserts a new room named like the currently selected one, in the
        // selected category, with the price typed into txtGiaPhong.
        private void btnAdd_Click(object sender, EventArgs e)
        {
            Phong table = listVThongTinPhong.Tag as Phong;
            string tenPhong = (cbbPhong.SelectedItem as Phong).TenPhong;
            string maLoaiPhong = (cbbLoaiPhong.SelectedItem as LoaiPhong).MA;
            string donViTienTe = txtDonViTien.Text;
            float a;
            var thugiaphong = float.TryParse(txtGiaPhong.Text, out a);
            if (thugiaphong)
            {
                float giaPhong = float.Parse(txtGiaPhong.Text);
                PhongDAO.Instances.insertPhong(tenPhong, maLoaiPhong, giaPhong, donViTienTe);
                LoadDanhSach();
                LoadDanhSachThongKeTinhTrangPhong();
                MessageBox.Show("Thành Công");
            }
            else MessageBox.Show("Bạn Phải Nhập Giá Phòng Kiểu Số");
        }
        // Room selection changed: mirror its price/currency and show its details.
        private void cbbPhong_SelectedIndexChanged(object sender, EventArgs e)
        {
            string ma = (cbbPhong.SelectedItem as Phong).MA;
            txtGiaPhong.Clear();
            txtGiaPhong.Text = (cbbPhong.SelectedItem as Phong).GiaPhong.ToString();
            txtDonViTien.Clear();
            txtDonViTien.Text = (cbbPhong.SelectedItem as Phong).DonViTienTe.ToString();
            LoadThongTinPhong(ma);
        }
        // Updates the selected room: category comes from cbbChonLoaiPhongDeSua,
        // price/currency from the text boxes.
        private void btnUpdate_Click(object sender, EventArgs e)
        {
            Phong table = listVThongTinPhong.Tag as Phong;
            string maPhong = (cbbPhong.SelectedItem as Phong).MA;
            string tenPhong = (cbbPhong.SelectedItem as Phong).TenPhong;
            string maLoaiPhong = (cbbChonLoaiPhongDeSua.SelectedItem as LoaiPhong).MA;
            string donViTienTe = txtDonViTien.Text;
            float a;
            var thugiaphong = float.TryParse(txtGiaPhong.Text, out a);
            if (thugiaphong)
            {
                float giaPhong = float.Parse(txtGiaPhong.Text);
                PhongDAO.Instances.UpdatePhong(maPhong,tenPhong, maLoaiPhong, giaPhong, donViTienTe);
                LoadDanhSach();
                LoadThongTinPhong(maPhong);
                LoadDanhSachThongKeTinhTrangPhong();
                MessageBox.Show("Thành Công");
            }
            else MessageBox.Show("Bạn Phải Nhập Giá Phòng Kiểu Số");
        }
        // Show every room's details in the list view.
        private void btnHienThiToanBoDanhSach_Click(object sender, EventArgs e)
        {
            LoadThongTinPhong();
        }
        // Initial load: room map, category combos and the state summary.
        public void fSoDoPhong_Load(object sender, EventArgs e)
        {
            LoadDanhSach();
            LoadLoaiPhongVaocbb();
            LoadDanhSachThongKeTinhTrangPhong();
        }
        // Removes the room selected in cbbPhong, deleting its booking rows first
        // so the FK constraint is satisfied.
        // NOTE(review): both branches of the if/else below perform the same
        // delete and (since maPhong == maphong implies the same value) show the
        // same message — the conditional is effectively dead.
        private void btnRemove_Click(object sender, EventArgs e)
        {
            string maPhong = (cbbPhong.SelectedItem as Phong).MA;
            string loaiPhong = (cbbPhong.SelectedItem as Phong).MaLoaiPhong;
            bool kiemtradexoa = DatPhongDAO.Instances.KiemTraPhongCoTonTaiTrongMaDP(maPhong);
            if (kiemtradexoa) DatPhongDAO.Instances.removeDatPhongTheoMaPhong(maPhong);
            if (maPhong != maphong)
            {
                MessageBox.Show("Ban da xoa Phong " + maPhong);
                PhongDAO.Instances.RemovePhong(maPhong);
            }
            else
            {
                MessageBox.Show("Ban da xoa Phong " + maphong);
                PhongDAO.Instances.RemovePhong(maPhong);
            }
            LoadTenPhongTheoLoaiPhong(loaiPhong);
            LoadDanhSach();
            LoadThongTinPhong(maPhong);
            LoadDanhSachThongKeTinhTrangPhong();
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using QuanLiKhachSan.DTO;
using System.Data;
namespace QuanLiKhachSan.DAO
{
public class ChucVuDAO
{
private static ChucVuDAO instances;
public ChucVuDAO() { }
public static ChucVuDAO Instances
{
get
{
if (instances == null) instances = new ChucVuDAO(); return instances;
}
set => instances = value;
}
public bool ThemChucVu(EC_CHUCVU ec)
{
string query = " execute ThemChucVu @TenCV";
DataProvider.Instance.ExecuteQuery(query, new object[] { ec.TenChucVu });
return true;
}
public bool SuaChucVu(EC_CHUCVU ec)
{
string query = " execute SuaChucVu @MaCV , @TenCV";
DataProvider.Instance.ExecuteQuery(query, new object[] {ec.MaChucVu, ec.TenChucVu });
return true;
}
public bool XoaChucVu(EC_CHUCVU ec)
{
string query = " execute XoaChucvu_thinh @Macv";
DataProvider.Instance.ExecuteQuery(query, new object[] { ec.MaChucVu });
return true;
}
public DataTable Taobang(string dieukien)
{
string query = "select * from CHUCVU" + dieukien;
DataTable dt = DataProvider.Instance.ExecuteQuery(query, new object[] { });
return dt;
}
}
}
<file_sep>using QuanLiKhachSan.DTO;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
public class LoaiPhongDAO
{
private static LoaiPhongDAO instances;
internal static LoaiPhongDAO Instances { get {
if (instances == null)
instances = new LoaiPhongDAO();
return instances;
} private set => instances = value; }
private LoaiPhongDAO() { }
public List<LoaiPhong> DanhSachLoaiPhong()
{
string query = "select * from LoaiPhong";
DataTable data = DataProvider.Instance.ExecuteQuery(query);
List<LoaiPhong> danhsachloaiphong = new List<LoaiPhong>();
foreach (DataRow item in data.Rows)
{
LoaiPhong loaiPhong = new LoaiPhong(item);
danhsachloaiphong.Add(loaiPhong);
}
return danhsachloaiphong;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data;
namespace QuanLiKhachSan.DAO
{
public class HOADON
{
//public string MAHD { get; set; }
//public DateTime NGAYTHANHTOAN { get; set; }
//public string MAKH { get; set; }
//public string MANHANVIEN { get; set; }
public static DataTable dSMAHD()
{
return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.dsMAHD_bay");
}
public static DataTable getHD()
{
return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.getHD");
}
public static DataTable getHD(string date1, string date2)
{
return DAO.DataProvider.Instance.ExecuteQuery("exec dbo.getHD_date1_date2 @date1='"+date1+"', @date2='"+date2+"'");
}
public static DataTable TimHD(string mahd)
{
string query = "exec [dbo].[TimHD] @mahd";
return DataProvider.Instance.ExecuteQuery(query, new object[] { mahd});
}
public static int ThemHD(string date,string makh, string manv,string tienphong,string tiendv)
{
return DAO.DataProvider.Instance.ExecuteNonQuery("exec dbo.themHD_bay @date='"+date+"',@makh='"+makh+"',@manv='"+manv+"',@tienphong="+tienphong+",@tiendv="+tiendv+"");
}
public static string TimMAHDVuaTao()
{
DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.TimMAHDVuaTao_bay");
return dt.Rows[0]["MAHD"].ToString();
}
public static string NgayThanhToanMoiNhat()
{
DataTable dt = DataProvider.Instance.ExecuteQuery("exec dbo.NgayThanhToanMoiNhat_bay");
return dt.Rows[0]["NGAYTHANHTOAN"].ToString();
}
//xoa hoadon
public static int DeleteHD(string mahd)
{
return DataProvider.Instance.ExecuteNonQuery("exec dbo.DeleteHD_mahd_bay @mahd='"+mahd+"'");
}
}
}
<file_sep>using QuanLiKhachSan.DAO;
using QuanLiKhachSan.DTO;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace QuanLiKhachSan
{
public partial class fReport : Form
{
public fReport()
{
InitializeComponent();
}
private string maHD;
public string MaHD
{
get { return maHD; }
set { maHD = value; }
}
private string maKH;
public string MaKH
{
get { return maKH; }
set { maKH = value; }
}
private string maPhong;
public string MaPhong
{
get { return maPhong; }
set { maPhong = value; }
}
private string maDatPhong;
public string MaDatPhong
{
get { return maDatPhong; }
set { maDatPhong = value; }
}
private string ngayTT;
public string NgayTT
{
get { return ngayTT; }
set { ngayTT = value; }
}
private string traTruoc;
public string TraTruoc
{
get { return traTruoc; }
set { traTruoc = value; }
}
public static DataTable dttbTP { get; set; }
public static DataTable dttbTDV { get; set; }
public static KHACHHANG kh;
private void fReport_Load(object sender, EventArgs e)
{
grvTienPhong.DataSource = dttbTP;
grvTTDV.DataSource = dttbTDV;
kh = CTHD.TimKH(MaKH);
LoadTTKH(kh);
LoadTTTien();
lblNgaytt.Text = NgayTT;
lblMahd.Text = MaHD;
lblMadp.Text = "madap";
this.reportViewer1.RefreshReport();
// groupBox3.Height = (grvTienPhong.RowCount) * grvTienPhong.RowTemplate.Height * 2;
// groupBox4.Height = (grvTTDV.RowCount) * grvTTDV.RowTemplate.Height * 2;
}
private void LoadTTKH(KHACHHANG kh)
{
lblMaKH.Text = kh.MAKH;
lblHoTen.Text = kh.TENKH;
lblGioiTinh.Text = kh.GIOITINH.ToString();
lblNgaySinh.Text = kh.NGAYSINH.ToString("dd/MM/yyyy");
lblCMND.Text = kh.CMND;
lblDiaChi.Text = kh.DIACHI;
}
private void LoadTTTien()
{
CultureInfo cul = CultureInfo.GetCultureInfo("vi-VN");
int tienphong = 0;
int tiendv = 0;
foreach (DataGridViewRow item in grvTienPhong.Rows)
{
tienphong += Convert.ToInt32(item.Cells[5].Value);
}
foreach (DataGridViewRow item in grvTTDV.Rows)
{
tiendv += Convert.ToInt32(item.Cells[4].Value);
}
lblTienthuephong.Text = tienphong.ToString("#,###", cul.NumberFormat) + "(VNĐ)";
if (tiendv != 0) lblTiendichvu.Text = tiendv.ToString("#,###", cul.NumberFormat) + "(VNĐ)"; else lblTiendichvu.Text = "0.000(VNĐ)";
lblTongtien.Text = (tienphong + tiendv).ToString("#,###", cul.NumberFormat) + "(VNĐ)";
lblTratruoc.Text = int.Parse(traTruoc).ToString("#,###", cul.NumberFormat) + "VNĐ";
lblConlai.Text = ((tienphong + tiendv) - int.Parse(traTruoc)).ToString("#,###", cul.NumberFormat) + "(VNĐ)";
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
public class NHANVIEN
{
public string manv { get; set; }
public string machucvu { get; set; }
public NHANVIEN(DataRow row)
{
this.manv=row["MANHANVIEN"].ToString();
this.machucvu = row["MACHUCVU"].ToString();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DTO
{
public class DatPhong
{
private string maDatPhong;
private string maPhong;
private string maKH;
private float traTruoc;
private DateTime ngayO;
private DateTime? ngayDi;
private float giaPhongHienTai;
private object item;
private bool trangThaiThanhToan;
public string MaDatPhong { get => maDatPhong; set => maDatPhong = value; }
public string MaPhong { get => maPhong; set => maPhong = value; }
public string MaKH { get => maKH; set => maKH = value; }
public float TraTruoc { get => traTruoc; set => traTruoc = value; }
public DateTime NgayO { get => ngayO; set => ngayO = value; }
public DateTime? NgayDi { get => ngayDi; set => ngayDi = value; }
public bool TrangThaiThanhToan { get => trangThaiThanhToan; set => trangThaiThanhToan = value; }
public float GiaPhongHienTai { get => giaPhongHienTai; set => giaPhongHienTai = value; }
public DatPhong(string maDatPhong, string maPhong, string maKH, float traTruoc, DateTime ngayO, DateTime ngayDi,
bool trangThaiThanhToan,float giaPhongHienTai)
{
this.MaDatPhong=maDatPhong;
this.MaPhong=maPhong;
this.MaKH=maKH;
this.TraTruoc=traTruoc;
this.NgayO=ngayO;
this.NgayDi=ngayDi;
this.TrangThaiThanhToan = trangThaiThanhToan;
this.GiaPhongHienTai = giaPhongHienTai;
}
public DatPhong(DataRow row)
{
this.MaDatPhong = row["MADATPHONG"].ToString();
this.MaPhong = row["MAPHONG"].ToString();
this.MaKH = row["MAKH"].ToString();
this.TraTruoc = float.Parse(row["TRATRUOC"].ToString());
this.NgayO = (DateTime)row["NGAYO"];
if(row["NGAYDI"].ToString()!="")
this.NgayDi = (DateTime?)row["NGAYDI"];
this.TrangThaiThanhToan = (bool)row["TrangThaiThanhToan"];
this.GiaPhongHienTai = float.Parse(row["GiaPhongHienTai"].ToString());
}
public DatPhong(DataRow row,int a)
{
this.MaDatPhong = row["MADATPHONG"].ToString();
}
public DatPhong(object item)
{
this.item = item;
}
}
}
<file_sep>using QuanLiKhachSan.DTO;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLiKhachSan.DAO
{
public class PhongDAO
{
private static PhongDAO instances;
public static PhongDAO Instances
{
get
{
if (instances == null) instances = new PhongDAO();return instances;
}
set
{
instances = value;
}
}
private PhongDAO()
{
}
public List<TableHienThiThongKeTinhTrangPhong> LoadDanhSachThongKePhong()
{
string query1 = "execute ThongKePhong";
List<TableHienThiThongKeTinhTrangPhong> danhsachthongke = new List<TableHienThiThongKeTinhTrangPhong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query1);
foreach (DataRow item in data.Rows)
{
TableHienThiThongKeTinhTrangPhong table = new TableHienThiThongKeTinhTrangPhong(item);
danhsachthongke.Add(table);
}
return danhsachthongke;
}
public List<Phong> LoadDanhSach()
{
string query = "execute DanhSachPhong";
List<Phong> danhsachphong = new List<Phong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query);
foreach (DataRow item in data.Rows)
{
Phong table = new Phong(item);
danhsachphong.Add(table);
}
return danhsachphong;
}
public List<Phong> LoadDanhSachTheoMaLoaiPhong(string ma) {
string query = "EXECUTE LayPhongTheoMaLoaiPhong @id";
List<Phong> danhsachphong1 = new List<Phong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] { ma});
foreach (DataRow item in data.Rows)
{
Phong table1 = new Phong(item);
danhsachphong1.Add(table1);
}
return danhsachphong1;
}
public List<Phong> LoadDanhSachTheoMavaTinhTrang(string ma)
{
string query = "EXECUTE LAYDANHSACHPHONGTHEOLOAIPHONGVATINHTRANG @id";
List<Phong> danhsachphong1 = new List<Phong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] { ma });
foreach (DataRow item in data.Rows)
{
Phong table1 = new Phong(item);
danhsachphong1.Add(table1);
}
return danhsachphong1;
}
public List<Phong> LoadDanhSachTheoMaPhong(string ma)
{
string query = "EXECUTE DANHSACHPHONGTHEOMA @id";
List<Phong> danhsachphong1 = new List<Phong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] { ma });
foreach (DataRow item in data.Rows)
{
Phong table1 = new Phong(item);
danhsachphong1.Add(table1);
}
return danhsachphong1;
}
public Phong LoadPhongTheoMaPhong(string ma)
{
string query = "EXECUTE DANHSACHPHONGTHEOMA @id";
List<Phong> danhsachphong1 = new List<Phong>();
DataTable data = DataProvider.Instance.ExecuteQuery(query, new object[] { ma });
Phong phong = new Phong(data.Rows[0]);
return phong;
}
public string KiemTraLoadDanhSach(String MA)
{
string query = "EXECUTE DANHSACHPHONGTHEOMA @ID";
DataTable data = DataProvider.Instance.ExecuteQuery(query,new object[] {MA });
if (data.Rows.Count > 0)
{
Phong phong = new Phong(data.Rows[0]);
return phong.MA;
}
return "ko co";
}
public void insertPhong(string tenPhong,string maLoaiPhong,float giaPhong, string donViTienTe)
{
string query = "execute THEMPHONG @TENPHONG , @MALOAIPHONG , @GIAPHONG , @DONVITIENTE";
DataProvider.Instance.ExecuteQuery(query, new object[] { tenPhong, maLoaiPhong, giaPhong, donViTienTe });
}
public void UpdatePhong(string maPhong ,string tenPhong, string maLoaiPhong, float giaPhong, string donViTienTe)
{
string query = "execute updatePhong @MAPHONG , @tenphong , @MALOAIPHONG , @GIAPHONG , @DONVITIENTE ";
DataProvider.Instance.ExecuteQuery(query, new object[] { maPhong, tenPhong, maLoaiPhong, giaPhong, donViTienTe });
}
public void RemovePhong(string maPhong)
{
string query = "execute RemovePhong @maphong ";
DataProvider.Instance.ExecuteQuery(query, new object[] { maPhong });
}
}
}
<file_sep>using QuanLiKhachSan.DAO;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace QuanLiKhachSan
{
public partial class fHoaDonDichVu : Form
{
        // Vietnamese culture, used to render money amounts with the local
        // thousand-separator format throughout this form.
        CultureInfo cul = CultureInfo.GetCultureInfo("vi-VN");
        // Invoice listing / revenue form.
        public fHoaDonDichVu()
        {
            InitializeComponent();
        }
private void LoadHD()
{
AutoCompleteStringCollection acsc = new AutoCompleteStringCollection();
foreach (DataRow item in HOADON.dSMAHD().Rows)
{
acsc.Add(item["MAHD"].ToString());
}
txtTK.AutoCompleteCustomSource = acsc;
dataGridView2.DataSource = HOADON.getHD();
LoadDoanhThu();
}
private int LoadDoanhThu()
{
int doanhthu = 0;
foreach (DataGridViewRow item in dataGridView2.Rows)
{
doanhthu += Convert.ToInt32(item.Cells[6].Value);
}
if (doanhthu == 0)
lblDoanhThu.Text = "0 (VNĐ)";
else lblDoanhThu.Text = doanhthu.ToString("#,###", cul.NumberFormat) + "(VNĐ)";
return doanhthu;
}
        // Initial load: default the end-date picker to today, build the
        // autocomplete list, load the grid, lock all data columns, and hide the
        // delete controls for users without the privileged role.
        public void fHoaDonDichVu_Load(object sender, EventArgs e)
        {
            dateTimePicker3.Value = DateTime.Today;
            AutoCompleteStringCollection acsc = new AutoCompleteStringCollection();
            foreach (DataRow item in HOADON.dSMAHD().Rows)
            {
                acsc.Add(item["MAHD"].ToString());
            }
            txtTK.AutoCompleteCustomSource = acsc;
            LoadHD();
            if (dataGridView2.Rows.Count > 0)
                dateTimePicker1.Value = DateTime.Parse(HOADON.NgayThanhToanMoiNhat());
            dataGridView2.Columns["chon"].Width = 30;
            // Grid stays editable so the "chon" checkbox column works, but all
            // data columns are made read-only individually.
            dataGridView2.ReadOnly = false;
            dataGridView2.Columns[1].ReadOnly = true;
            dataGridView2.Columns[2].ReadOnly = true;
            dataGridView2.Columns[3].ReadOnly = true;
            dataGridView2.Columns[4].ReadOnly = true;
            dataGridView2.Columns[5].ReadOnly = true;
            dataGridView2.Columns[6].ReadOnly = true;
            // "CV0002" is presumably the manager role id — only that role may
            // delete invoices. TODO confirm against the CHUCVU table.
            if (CTHD.TimNV().machucvu != "CV0002")
            {
                btnXoa.Visible = false;
                dataGridView2.Columns["chon"].Visible = false;
            }
            // (translated) Dev note: spent ~8 hours hunting this bug; try removing
            // the delegate — it worked fine on my machine; read the error yourself.
            // (translated) The error is related to data access.
            txtTK.Text = "nhập mã mã hóa đơn...!";
            Changebackgroundcolor_rowodd();
        }
private void textBox1_Enter(object sender, EventArgs e)
{
if (txtTK.Text == "nhập mã mã hóa đơn...!")
{
txtTK.Text = "";
}
}
private void textBox1_Leave(object sender, EventArgs e)
{
if (txtTK.Text == "")
{
txtTK.Text = "nhập mã mã hóa đơn...!";
txtTK.ForeColor = Color.Gray;
}
}
private void btnXem_Click(object sender, EventArgs e)
{
dataGridView2.DataSource = HOADON.getHD(dateTimePicker1.Value.ToString("yyyy-MM-dd"), dateTimePicker3.Value.ToString("yyyy-MM-dd"));
LoadDoanhThu();
Changebackgroundcolor_rowodd();
}
private void dataGridView2_CellDoubleClick(object sender, DataGridViewCellEventArgs e)
{
if (e.RowIndex == -1) { return; }
string maHD = dataGridView2.Rows[e.RowIndex].Cells[1].Value.ToString();
fBaoCaoHoaDon fm = new fBaoCaoHoaDon();
fm.funData = maHD;
fm.ShowDialog();
}
private void btnTK_Click(object sender, EventArgs e)
{
dataGridView2.DataSource = HOADON.TimHD(txtTK.Text.Trim());
LoadDoanhThu();
txtTK.Text = null;
}
private void dateTimePicker3_KeyDown(object sender, KeyEventArgs e)
{
if (e.KeyCode == Keys.Enter)
{
btnXem_Click(this, new EventArgs());
}
}
private void txtTK_KeyDown(object sender, KeyEventArgs e)
{
if (e.KeyCode == Keys.Enter)
{
btnTK_Click(this, new EventArgs());
}
}
private void btnInhoadon_Click(object sender, EventArgs e)
{
if (dataGridView2.SelectedRows.Count==0)
{
return;
}
string maHD = dataGridView2.Rows[dataGridView2.CurrentCell.RowIndex].Cells[1].Value.ToString();
fBaoCaoHoaDon fm = new fBaoCaoHoaDon();
if(dataGridView2.SelectedRows.Count==1)
fm.funData = maHD;
else
{
List<string> lst = new List<string>();
foreach (DataGridViewRow item in dataGridView2.Rows)
{
if (item.Selected) lst.Add(item.Cells[1].Value.ToString());
}
fm.LISTMAHD = lst;
}
fm.ShowDialog();
fm.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Sizable;
fm.EnableInhoadon(true);
}
private void dataGridView2_ColumnHeaderMouseClick(object sender, DataGridViewCellMouseEventArgs e)
{
Changebackgroundcolor_rowodd();
}
private void Changebackgroundcolor_rowodd()
{
for (int i = 0; i < dataGridView2.Rows.Count; i++)
{
if (i % 2 != 0)
{
dataGridView2.Rows[i].DefaultCellStyle.BackColor = System.Drawing.Color.LightGray;
}
}
}
private void btnRefresh_Click(object sender, EventArgs e)
{
LoadHD();
Changebackgroundcolor_rowodd();
}
private void btnXoa_Click(object sender, EventArgs e)
{
foreach (DataGridViewRow item in dataGridView2.Rows)
{
if (Convert.ToBoolean(item.Cells["chon"].Value) == true)
{
foreach (DataRow r in CTHD.DSCTHD_FromMHD(item.Cells[1].Value.ToString()).Rows)
{
CTHD.DeleteDatDV(r["Mã đặt phòng"].ToString());
CTHD.DeleteDatPhong(r["Mã đặt phòng"].ToString());
}
CTHD.DeleteCTHD(item.Cells[1].Value.ToString());
HOADON.DeleteHD(item.Cells[1].Value.ToString());
}
}
LoadHD();
Changebackgroundcolor_rowodd();
}
}
}
|
221c0f5a79715c10a22d235fd703302f22aee31e
|
[
"C#"
] | 43 |
C#
|
NguyenVanBay101097/QuanLiKhachSan_proc_19_6
|
07e8bd47069d5c27f9fd596b122a8e47d4128c8d
|
b0e5e6557cde4466605c8e222729654727826863
|
refs/heads/master
|
<file_sep>const { server } = require("../src/server");
const chai = require("chai");
const chaiHttp = require("chai-http");
chai.use(chaiHttp);
chai.should();
const app = server();
// Smoke test for the Express app: a fresh supertest/chai-http client is
// created for each example.
describe("API Server", () => {
  let request;
  beforeEach(() => {
    request = chai.request(app);
  });
  describe("Restfull", () => {
    // NOTE(review): asserts the literal body "hellllo"; confirm the "/" handler
    // in src/server actually sends this string (the routes file renders a view).
    it("Should print hello ", async () => {
      const res = await request.get("/");
      console.log(res.text);
      res.text.should.deep.equal("hellllo");
    });
  });
});
<file_sep>const films = require("./data/Film.js");
exports.seed = function(knex, Promise) {
// Deletes ALL existing entries
return knex("film")
.del()
.then(() => {
// Inserts seed entries
return knex("film").insert(films);
});
};
<file_sep>exports.up = function(knex, Promise) {
return knex.schema.createTable("film", (table) => {
table.increments("film_id").primary();
table.string("Title", 100).notNullable();
table.string("Year").notNullable();
table.string("Rated", 5).notNullable();
table.string("Released", 20).notNullable();
table.string("Runtime").notNullable();
table.string("Genre", 250).notNullable();
table.string("Director").notNullable();
table.string("Writer").notNullable();
table.string("Actors").notNullable();
table.string("Plot").notNullable();
table.string("Language").notNullable();
table.string("Country").notNullable();
table.string("Awards", 250);
table.string("Poster", 250);
table.string("Metascore");
table.string("imdbRating");
table.string("imdbVotes");
table.string("imdbID").notNullable();
table.string("Type");
table.specificType('Images', 'text ARRAY')
});
};
exports.down = function(knex, Promise) {
return knex.schema.dropTable("film");
};
<file_sep>const express = require("express");
const router = express.Router();
const config = require("../config.js");
const knex = require("knex")(config.db);
// GET / — landing page.
router.get("/", (req, res) => {
  return Promise.resolve(res.render("index"));
});

// GET /api/All — list every film.
router.get("/api/All", (req, res) => {
  return Promise.resolve(
    knex("film")
      .select()
      .then((films) => {
        res.render("all", { films: films });
      })
  );
});

// GET /api/Films/:id — show a single film by primary key.
router.get("/api/Films/:id", (req, res) => {
  const id = req.params.id;
  console.log(id);
  return Promise.resolve(
    knex("film")
      .select()
      .where({ film_id: id })
      .then((film) => {
        res.render("single", { film: film });
      })
      .catch((err) => {
        console.log(err);
      })
  );
});

// GET /api/modify — render the edit form.
router.get("/api/modify", (req, res) => {
  return Promise.resolve(res.render("modify"));
});

// POST /api/modify/edit — update a film; only non-blank fields are applied,
// and film_id itself is never written.
router.post("/api/modify/edit", (req, res) => {
  const obj = {};
  for (const key in req.body) {
    // Skip empty / whitespace-only form fields.
    if (!(!req.body[key] || /^\s*$/.test(req.body[key]))) {
      if (key !== "film_id") {
        obj[key] = req.body[key];
      }
    }
  }
  // Nothing to update -> error page.
  if (Object.keys(obj).length === 0) {
    return Promise.resolve(res.render("err"));
  }
  return Promise.resolve(
    knex("film")
      .where({ film_id: req.body.film_id })
      .update(obj)
      .select()
      .then((film) => {
        res.redirect("/api/all");
      })
      .catch((err) => {
        console.log(err);
        res.render("err");
      })
  );
});

// GET /api/delete — render the delete form.
router.get("/api/delete", (req, res) => {
  return Promise.resolve(res.render("delete"));
});

// POST /api/delete/film — delete the film whose film_id was submitted.
router.post("/api/delete/film", (req, res) => {
  const obj = {};
  for (const key in req.body) {
    if (!(!req.body[key] || /^\s*$/.test(req.body[key]))) {
      if (key === "film_id") {
        obj[key] = req.body[key];
      }
    }
  }
  // No valid film_id submitted -> error page.
  if (Object.keys(obj).length === 0) {
    return Promise.resolve(res.render("err"));
  }
  return Promise.resolve(
    knex("film")
      .where({ film_id: req.body.film_id })
      .del()
      .then(() => {
        res.redirect("/api/all");
      })
      .catch((err) => {
        console.log(err);
        res.render("err");
      })
  );
});

// GET /api/films — render the "add film" form.
router.get("/api/films", (req, res) => {
  return Promise.resolve(res.render("addFilm"));
});
// PATCH /api/Films — create a film from the submitted form fields and
// redirect to its detail page.
// NOTE(review): REST convention would register this as router.post since it
// creates a record; left as PATCH to avoid breaking existing clients.
router.patch("/api/Films", (req, res) => {
  const film = {
    Title: req.body.Title,
    Year: req.body.Year,
    Rated: req.body.Rated,
    Released: req.body.Released,
    Runtime: req.body.Runtime,
    Genre: req.body.Genre,
    Director: req.body.Director,
    Writer: req.body.Writer,
    Actors: req.body.Actors,
    Plot: req.body.Plot,
    // Fix: was `req.body.Langauge` (typo), so Language was always undefined
    // and inserted as NULL despite the NOT NULL column.
    Language: req.body.Language,
    Country: req.body.Country,
    Awards: req.body.Awards,
    Poster: req.body.Poster,
    Metascore: req.body.Metascore,
    imdbRating: req.body.imdbRating,
    imdbVotes: req.body.imdbVotes,
    imdbID: req.body.imdbID,
    Type: req.body.Type,
  };
  return Promise.resolve(
    knex("film")
      .insert(film, "film_id")           // returns the new primary key(s)
      .then((ids) => {
        const id = ids[0];
        res.redirect(`/api/Films/${id}`);
      })
  );
});

module.exports = router;
|
edf70f93357d93d3e5635d52ea7d5a7d8374e013
|
[
"JavaScript"
] | 4 |
JavaScript
|
Mavroian/Api-Project
|
bf0e795affd16a1b6fd79675d5e9151749b46aaa
|
115a2bd0f41ab5643c6497ad10afbefce3212f60
|
refs/heads/master
|
<file_sep>#include <iostream>
#include <windows.h>
#include "pointer.h"
HANDLE hProcess = NULL;
bool keyDownF1 = false;
bool keyDownF2 = false;
bool keyDownF3 = false;
bool keyDownF4 = false;
bool moduleOne = false;
bool moduleTwo = false;
bool moduleThree = false;
bool moduleFour = false;
// Returns TRUE when the current process runs with an elevated (administrator)
// token; used to refuse to start without the rights needed for OpenProcess.
BOOL IsElevated() {
    BOOL fRet = FALSE;
    HANDLE hToken = NULL;
    if (OpenProcessToken(GetCurrentProcess(), TOKEN_QUERY, &hToken)) {
        TOKEN_ELEVATION Elevation;
        DWORD cbSize = sizeof(TOKEN_ELEVATION);
        if (GetTokenInformation(hToken, TokenElevation, &Elevation, sizeof(Elevation), &cbSize)) {
            fRet = Elevation.TokenIsElevated;
        }
    }
    // Always release the token handle, even when the query failed.
    if (hToken) {
        CloseHandle(hToken);
    }
    return fRet;
}
// Writes a 32-bit integer into the target process at `address`.
// Returns true iff WriteProcessMemory succeeds.
bool writeInt(int newValue, int address, HANDLE hProcess) {
    const SIZE_T byteCount = sizeof(newValue);
    return WriteProcessMemory(hProcess, (LPVOID)address, &newValue,
                              byteCount, NULL) != 0;
}
// Reads a 32-bit integer from the target process at `address`.
// Returns the value read, or -1 when ReadProcessMemory fails.
// NOTE: -1 is also a legal in-game value, so callers cannot distinguish
// a failed read from a real -1.
int readInt(int address, HANDLE hProcess) {
    int value = 0;
    const bool ok =
        ReadProcessMemory(hProcess, (LPVOID)address, &value, sizeof(value), NULL) != 0;
    return ok ? value : -1;
}
// Reads a NUL-terminated string (up to 255 bytes) from the target process
// at `address`. Returns a pointer to an internal static buffer, or NULL on
// failure. Not thread-safe (single shared buffer) — fine for this
// single-threaded loop.
// Fix: the old version read sizeof(const char*) bytes into the pointer
// variable itself and returned that value — an address inside the *remote*
// process, which is meaningless (and dangerous to dereference) locally.
const char* readString(int address, HANDLE hProcess) {
    static char buffer[256];
    SIZE_T bytesRead = 0;
    if (ReadProcessMemory(hProcess, (LPVOID)address, buffer, sizeof(buffer) - 1, &bytesRead)) {
        buffer[bytesRead] = '\0';  // guarantee termination even on short reads
        return buffer;
    }
    return NULL;
}
// Finds the "AssaultCube" window and opens its owning process with
// PROCESS_ALL_ACCESS. Returns the process handle, or NULL when the window
// is not found or OpenProcess fails (a message box is shown in that case).
// Fix: the function returns HANDLE but used `return false;`, relying on an
// implicit bool->pointer conversion — return NULL explicitly.
HANDLE initProcess() {
    HWND hWnd = FindWindow(0, "AssaultCube");
    if (hWnd == 0) {
        return NULL;
    }
    DWORD process_ID;
    GetWindowThreadProcessId(hWnd, &process_ID);
    HANDLE hProcess = OpenProcess(PROCESS_ALL_ACCESS, FALSE, process_ID);
    if (!hProcess) {
        MessageBox(NULL, "Could not open the process!", "404 Skill not found!", MB_OK | MB_ICONERROR);
        return NULL;
    }
    return hProcess;
}
// Edge-triggered toggle helper: flips `module` exactly once per key press.
// `keyDown` carries the previous poll's state so that holding the key does
// not toggle repeatedly.
static void toggleModuleOnKeyPress(int virtualKey, bool& keyDown, bool& module) {
    if (GetKeyState(virtualKey) < 0) {
        if (!keyDown) {
            module = !module;
        }
        keyDown = true;
    }
    else {
        keyDown = false;
    }
}

// Polls F1-F4 and toggles the matching cheat module on each fresh key press.
// Fix: the original repeated the identical press/release state machine four
// times; the logic now lives once in toggleModuleOnKeyPress.
void updateKeyboard() {
    toggleModuleOnKeyPress(VK_F1, keyDownF1, moduleOne);
    toggleModuleOnKeyPress(VK_F2, keyDownF2, moduleTwo);
    toggleModuleOnKeyPress(VK_F3, keyDownF3, moduleThree);
    toggleModuleOnKeyPress(VK_F4, keyDownF4, moduleFour);
}
// Runs one cheat tick: polls the hotkeys, then applies the enabled modules.
//  - moduleOne:   refills health to 100 whenever it drops below 90.
//  - moduleThree: writes the speed field back to 1 when it reads 10.
void update() {
    updateKeyboard();

    if (moduleOne) {
        int address = p_base + o_health;
        int currentHealth = readInt(address, hProcess);
        if (currentHealth < 90) {
            writeInt(100, address, hProcess);
        }
    }

    if (moduleThree) {
        int address = p_base + o_speed;
        int currentSpeed = readInt(address, hProcess);
        if (currentSpeed == 10) {
            if (!writeInt(1, address, hProcess)) {
                // Fix: the old code showed `"Failed to write at: " + address`,
                // which is pointer arithmetic on the string literal and prints
                // garbage (or reads out of bounds). Format the address instead.
                char msg[64];
                wsprintfA(msg, "Failed to write at: 0x%X", address);
                MessageBox(NULL, msg, "Excuse me WTF!", 0x00000010L);
                ExitProcess(EXIT_FAILURE);
            }
        }
        // (empty else branch removed — it did nothing)
    }
}
// Attaches to the game process and runs the cheat loop.
// Returns false when the game window/process could not be opened; on success
// it never returns — the loop has no exit condition, so main()'s
// EXIT_SUCCESS path is unreachable by design.
// Fix: the original stored a possibly-NULL handle and entered the loop
// anyway, making every Read/WriteProcessMemory call fail silently forever.
bool initCheat() {
    hProcess = initProcess();
    if (hProcess == NULL) {
        return false;
    }
    while (true) {
        update();
    }
}
int main() {
if (!IsElevated()) {
MessageBox(NULL, "You need to run the application as an administrator in order to use this cheat!", "Excuse me WTF!", 0x00000010L);
ExitProcess(EXIT_FAILURE);
}
if (initCheat()) {
ExitProcess(EXIT_SUCCESS);
} else {
ExitProcess(EXIT_FAILURE);
}
}<file_sep>#pragma once
// AssaultCube memory layout: static base pointer of the local-player object
// and field offsets relative to it. Values are version-specific — they must
// be re-dumped after a game update.
int p_base = 0x0114A860;   // static address holding the local-player pointer
int o_health = 0xF8;       // current health (int)
int o_amour = 0xFC;        // armour value (sic: "amour")
int o_speed = 0x80; // 1 = Normal - 10 = fastest
int o_crouch = 0x5C; //3.375 = Crouch - 4.5 = Stand
int o_height = 0xC;        // player height / Z position
int o_jump = 0x3C;
int o_name = 0x225; //String
//Ammo Offsets
int o_ar = 0x150;              // assault rifle: loaded magazine
int o_ar_extra_mag = 0x128;    // assault rifle: reserve ammo
int o_pistol_ammo = 0x13C;
int o_pistol_extra_mag = 0x114;
int o_grenade = 0x158;
int o_akimbo_enable = 0x10C; // 0 = Disabled - 1 = Enabled
int o_akimbo_ammo = 0x15C;
int o_akimbo_extra_mag = 0x134;<file_sep># AssaultCubeCheat
An AssaultCube cheat that we are coding just for fun and it is not working... yet.
|
7604aa3bf9deecefe67756104dc680f63a16eaef
|
[
"Markdown",
"C",
"C++"
] | 3 |
C++
|
Polyfish0/AssaultCubeCheat
|
0217e1f9f68159b79d4e4088921691bc8d8b4093
|
aab7f92be6282d4ee21bea5259b1fc0dff675eef
|
refs/heads/master
|
<repo_name>sundropgold/TheAlexander<file_sep>/assets/js/main.js
$(document).ready(function(){
    /* =============== HOME PAGE =============== */
    // INCREMENTING NUMBERS --------------------------
    var satisfaction = $('#satisfaction');
    // var customers = $('#customers');
    // var donations = $('#donations');

    // Animates `section` counting up from 0 to `goal`, one step per tick.
    // Fix: the original spun in a busy `while` loop whose exit flag was never
    // set (freezing the page) and never wrote the number into the DOM;
    // it also misused clearTimeout() as a loop terminator.
    function increaseNum(section, goal){
        var current = 0;
        var timer = setInterval(function(){
            current += 1;
            section.html("<h2>" + current + "</h2>");
            if (current >= goal) {
                clearInterval(timer);
            }
        }, 20);
    }

    // Starts the satisfaction counter (target 100) after a short delay.
    // Fix: the original passed `increaseNum(...)`'s return value to setTimeout,
    // invoking it immediately; wrap the call in a function instead.
    function incrementSatisfaction(){
        setTimeout(function(){
            increaseNum(satisfaction, 100);
        }, 500);
    }

    incrementSatisfaction();
    // incrementCustomers();  // re-enable once #customers exists
    // incrementDonations();  // re-enable once #donations exists
});
|
72979f64e9644d2dcb15814bed2638c1629f64cb
|
[
"JavaScript"
] | 1 |
JavaScript
|
sundropgold/TheAlexander
|
f6f089fa9e694084a5458d06b61fdf9d89b2ea9a
|
7a44b827fc55e858c9a3621c05bfa1c844f98604
|
refs/heads/master
|
<file_sep>RSpec.describe UserSession, type: :controller do
controller(Web::ApplicationController) do
include UserSession
end
describe '#create_user_session! and #destroy_user_session!' do
it 'creates and destroys user session' do
user = create :user
controller.create_user_session!(user)
expect(session[:user_token]).to eq(user.token)
controller.destroy_user_session!
expect(session[:user_token]).to be(nil)
end
end
describe '#user_id_from_token' do
it 'returns user_id from token' do
user = create :user
controller.create_user_session!(user)
expect(controller.user_id_from_token).to eq(user.id)
end
it 'should raise Exceptions::InvalidAuthToken' do
expect{controller.user_id_from_token}.to raise_error(Exceptions::InvalidAuthToken)
end
end
describe '#authenticate_user!' do
it 'authenticates user' do
user = create :user
controller.create_user_session!(user)
expect(controller.authenticate_user!).to eq(user.id)
end
end
describe '#current_user' do
it 'returns currently logged user' do
user = create :user
controller.create_user_session!(user)
expect(controller.current_user).to eq(user)
end
end
end
<file_sep>class Forms
class Session
include ActiveAttr::Model
attribute :email
attribute :password
validates :email, presence: true
validates :password, presence: true
end
end
<file_sep># README
[](https://travis-ci.org/digaev/task_manager)
# Using
```
# Create two users - one admin and one user, both with password `<PASSWORD>`
rake db:seed
# Create one fake task
rake fake_task:create
```
## Heroku
https://shielded-basin-64587.herokuapp.com/
<file_sep>require 'rails_helper'
RSpec.describe Web::TasksController, type: :controller do
describe '#index' do
it 'lists all tasks' do
user = create :user
create_list :task, 5, user: user
allow(Task).to receive(:includes).with(:user)
expect(Task.count).to eq(5)
end
end
describe '#create' do
before do
@user = create :user
controller.create_user_session!(@user)
allow(controller).to receive(:authenticate_user!)
end
it 'creates task with permitted params' do
task = build :task, user: @user
params = {
name: task.name,
description: task.description
}
permitted_params = ActionController::Parameters.new(params).permit(
:name, :description
)
expect(Task).to receive(:new).with(permitted_params).and_return(task)
post :create, params: { task: params }
end
context 'when create succeeds' do
before do
@task = build :task, user: @user
allow(@task).to receive(:save)
end
it 'creates task and redirects to web/user/tasks#index' do
post :create, params: { user_id: @task.user_id, task: {
name: @task.name, description: @task.description
} }
expect(@user.tasks.count).to eq(1)
expect(response).to redirect_to(user_tasks_path(@task.user))
end
end
context 'when create fails' do
it 'renders #new' do
post :create, params: { task: {
name: '', description: ''
} }
expect(response.status).to eq(200)
expect(response).to render_template(:new)
end
end
end
describe '#edit' do
context 'when user is not admin' do
before do
@user = create :user
controller.create_user_session!(@user)
expect(controller).to receive(:authenticate_user!).and_return(@user)
end
it 'can edit own tasks' do
task = create :task, user: @user
get :edit, params: { id: task.id }
expect(response.status).to eq(200)
expect(response).to render_template(:edit)
end
it 'can not edit tasks which not belongs to him' do
task = create :task
expect(@user.id != task.user_id).to be(true)
get :edit, params: { id: task.id }
expect(response.status).to eq(404)
end
end
context 'when user is admin' do
before do
@admin = create :user, admin: true
controller.create_user_session!(@admin)
expect(controller).to receive(:authenticate_user!).and_return(@admin)
end
it 'can edit any tasks' do
task = create :task
expect(@admin.id != task.user_id).to be(true)
get :edit, params: { id: task.id }
expect(response.status).to eq(200)
expect(response).to render_template(:edit)
end
end
end
describe '#update' do
context 'when user is not admin' do
before do
@user = create :user
controller.create_user_session!(@user)
expect(controller).to receive(:authenticate_user!).and_return(@user)
end
it 'can update own tasks' do
task = create :task, user: @user
patch :update, params: { id: task.id, task: { name: '1', description: '2' } }
expect(response).to redirect_to(user_tasks_path(@user))
end
it 'can not update tasks which not belongs to him' do
task = create :task
expect(@user.id != task.user_id).to be(true)
patch :update, params: { id: task.id, task: { name: '1', description: '2' } }
expect(response.status).to eq(404)
end
end
context 'when user is admin' do
before do
@admin = create :user, admin: true
controller.create_user_session!(@admin)
expect(controller).to receive(:authenticate_user!).and_return(@admin)
end
it 'can update any tasks' do
task = create :task
expect(@admin.id != task.user_id).to be(true)
patch :update, params: { id: task.id, task: { name: '1', description: '2' } }
expect(response).to redirect_to(user_tasks_path(task.user))
end
end
end
describe '#show' do
context 'when user is not admin' do
before do
@user = create :user
controller.create_user_session!(@user)
expect(controller).to receive(:authenticate_user!).and_return(@user)
end
it 'can view own tasks' do
task = create :task, user: @user
get :show, params: { id: task.id }
expect(response.status).to eq(200)
expect(response).to render_template(:show)
end
it 'can not view tasks which not owned by user' do
task = create :task
expect(@user.id != task.user_id).to be(true)
get :show, params: { id: task.id }
expect(response.status).to eq(404)
end
end
context 'when user is admin' do
before do
@admin = create :user, admin: true
controller.create_user_session!(@admin)
expect(controller).to receive(:authenticate_user!).and_return(@admin)
end
it 'can view any tasks' do
task = create :task
expect(@admin.id != task.user_id).to be(true)
get :show, params: { id: task.id }
expect(response.status).to eq(200)
expect(response).to render_template(:show)
end
end
end
describe '#destroy' do
before do
@task = create :task
controller.create_user_session!(@task.user)
expect(controller).to receive(:authenticate_user!)
end
it 'destroys task' do
expect {
delete :destroy, params: { id: @task.id }
}.to change(Task, :count).by(-1)
end
end
end
<file_sep>class Web::SessionsController < Web::ApplicationController
  # Sign-in / sign-out. Credentials are validated through the Forms::Session
  # form object, then checked against User#authenticate (bcrypt).

  def new
    @form = Forms::Session.new
  end

  # Creates a session when email/password match; re-renders the form with a
  # flash error otherwise.
  def create
    @form = Forms::Session.new(params.require(:session).permit(:email, :password))
    if @form.valid?
      user = User.find_by_email(@form.email)
      if user && user.authenticate(@form.password)
        create_user_session!(user)
        flash[:notice] = 'You are successfully signed in.'
        redirect_to user_tasks_path(user) and return
      else
        flash[:error] = 'Invalid email or password.'
      end
    end
    render :new
  end

  # Signs the user out; the flash is only set when a valid token existed.
  def destroy
    flash[:notice] = 'You are successfully signed out.' if user_id_from_token
    destroy_user_session!
    redirect_to root_url
  end
end
<file_sep>require 'rails_helper'
RSpec.describe User, type: :model do
it 'has a valid factory' do
expect(build :user).to be_valid
end
it { should have_secure_password }
describe 'schema' do
it { should have_db_column(:admin).with_options null: false, default: false }
it { should have_db_column :password_digest }
it { should have_db_index :email }
end
describe 'relations' do
it { should have_many(:tasks).dependent(:destroy) }
end
describe 'validations' do
subject { build :user }
it { should validate_presence_of :email }
it { should validate_uniqueness_of :email }
it { should validate_length_of(:password).is_at_least(6) }
end
describe '#token' do
it 'encodes ID into token' do
user = create :user
expect(user.token).to be_present
expect(Token.decode(user.token)).to eq({ 'user_id' => user.id })
end
end
end
<file_sep>FactoryGirl.define do
factory :task do
user
name { Faker::Lorem.word }
description { Faker::Lorem.sentence }
state { Task::STATES.sample }
attachment { Rack::Test::UploadedFile.new(File.join(Rails.root, 'public', 'robots.txt')) }
end
end
<file_sep>module UserSession
  # Session helpers mixed into web controllers: the signed-in user is tracked
  # by an encoded token stored in the Rails session.

  # Stores the user's encoded token in the session.
  def create_user_session!(user)
    session[:user_token] = user.token
  end

  def destroy_user_session!
    session.delete(:user_token)
  end

  # Before-action guard: requires a decodable session token, otherwise
  # redirects to the sign-in page.
  def authenticate_user!
    user_id_from_token
  rescue
    redirect_to new_session_path
  end

  # Returns the currently signed-in user (memoized per request),
  # or nil when the token is missing/invalid.
  def current_user
    user_id = user_id_from_token
    @current_user = if @current_user && @current_user.id == user_id
      @current_user
    else
      User.find_by_id(user_id)
    end
  rescue
    nil
  end

  # Decodes the session token and returns the embedded user id.
  # Raises Exceptions::InvalidAuthToken when the token is absent or invalid.
  def user_id_from_token
    token = session[:user_token]
    payload = Token.decode(token)
    payload['user_id'].to_i
  rescue
    raise Exceptions::InvalidAuthToken
  end
end
<file_sep>class Web::ApplicationController < ApplicationController
include UserSession
helper_method :current_user
rescue_from ActiveRecord::RecordNotFound do
render status: 404, plain: 'Not found.'
end
end
<file_sep>module Web::Users::TasksHelper
end
<file_sep>module Exceptions
class BadCredentials < StandardError; end
class InvalidAuthToken < StandardError; end
end
<file_sep>require 'simplecov'
SimpleCov.start
SimpleCov.coverage_dir(Rails.root.join('public', 'coverage'))
<file_sep>require 'rails_helper'
RSpec.describe Web::Users::TasksController, type: :controller do
describe '#index' do
before do
@user = create :user
controller.create_user_session!(@user)
allow(controller).to receive(:authenticate_user!)
end
it 'returns all tasks of user' do
tasks = Task.includes(:user).where(user_id: @user.id)
allow(Task).to receive(:includes).and_return(tasks)
expect(tasks).to receive(:where).with(user_id: @user.id).and_return(tasks)
get :index, params: { user_id: @user.id }
end
end
end
<file_sep>class User < ApplicationRecord
  # Account model with a bcrypt-hashed password (has_secure_password uses
  # the password_digest column).
  has_secure_password

  has_many :tasks, dependent: :destroy

  validates :email, presence: true, uniqueness: true
  # allow_nil lets existing records be updated without re-entering a password.
  validates :password, length: { minimum: 6 }, allow_nil: true

  # Encodes this user's id into the auth token consumed by UserSession.
  def token
    Token.encode(user_id: id)
  end
end
<file_sep>require 'rails_helper'
RSpec.describe Web::UsersController, type: :controller do
describe '#create' do
it 'creates user with permitted params' do
user = create :user
params = {
email: user.email,
password: user.password
}
permitted_params = ActionController::Parameters.new(params).permit(
:email, :password
)
expect(User).to receive(:create).with(permitted_params).and_return(user)
post :create, params: { user: params }
end
context 'when create succeeds' do
before do
@user = create :user
allow(User).to receive(:create).and_return @user
end
it 'creates session and redirects to web/users/tasks#index' do
expect(controller).to receive(:create_user_session!).with(@user)
post :create, params: { user: { email: @user.email, password: <PASSWORD> } }
expect(response).to redirect_to(user_tasks_path(@user))
end
end
context 'when create failed' do
before do
@user = build :user, password: ''
allow(User).to receive(:create).and_return @user
end
it 'renders #new' do
post :create, params: { user: { email: @user.email, password: <PASSWORD> } }
expect(response).to render_template(:new)
end
end
end
describe '#new' do
it 'creates instance of User' do
expect(User).to receive(:new)
get :new
end
end
end
<file_sep>Rails.application.routes.draw do
scope module: :web do
root to: 'tasks#index'
resources :users, only: [:new, :create] do
scope module: :users do
resources :tasks, only: [:index]
end
end
resources :tasks
resources :sessions, only: [:new, :create] do
delete :destroy, on: :collection
end
end
end
<file_sep>class Web::TasksController < Web::ApplicationController
  # Task CRUD. The public index needs no login; everything else requires a
  # signed-in user. Admins may manage any task, regular users only their own
  # (enforced in #set_task; RecordNotFound renders a 404 upstream).
  before_action :authenticate_user!, except: [:index]
  before_action :set_task, only: [:show, :edit, :update, :destroy]

  # Public list of all tasks, newest first.
  def index
    @tasks = Task.includes(:user).order(created_at: :desc)
  end

  # Creates a task owned by the session's user.
  def create
    @task = Task.new(task_params)
    @task.user_id = user_id_from_token
    if @task.save
      redirect_to user_tasks_path(@task.user)
    else
      render :new
    end
  end

  # Updates a task; handles both AJAX (js) and plain form (html) submits.
  def update
    success = @task.update_attributes(task_params)
    respond_to do |format|
      format.js do
        if success
          render js: 'document.location.reload();'
        else
          render js: "alert(\"Whoops!\\n\\n#{@task.errors.full_messages.join('\n')}\");"
        end
      end
      format.html do
        if success
          redirect_to user_tasks_path(@task.user)
        else
          render :edit
        end
      end
    end
  end

  # Destroys the task and returns to the referring page (or the task list).
  def destroy
    @task.destroy
    redirect_to(request.referer || tasks_path)
  end

  private

  # Loads the task with the ownership rule: admins see any task, other users
  # only their own (the bang finder raises when the pair does not match).
  def set_task
    @task =
      if current_user.admin?
        Task.find(params[:id])
      else
        Task.find_by_id_and_user_id!(params[:id], current_user.id)
      end
  end

  # Strong-parameter whitelist for task attributes.
  def task_params
    params.require(:task).permit(
      :name, :description, :state, :attachment
    )
  end
end
<file_sep>require 'rails_helper'
RSpec.describe Web::SessionsController, type: :controller do
describe '#new' do
it 'creates instance of Forms::Session' do
expect(Forms::Session).to receive(:new)
get :new
end
end
describe '#create' do
it 'creates session with permitted params' do
user = create :user
form = Forms::Session.new(
email: user.email,
password: <PASSWORD>
)
params = {
email: user.email,
password: <PASSWORD>
}
permitted_params = ActionController::Parameters.new(params).permit(
:email, :password
)
expect(Forms::Session).to receive(:new).with(permitted_params).and_return(form)
post :create, params: { session: params }
end
context 'when create succeeds' do
before do
@user = create :user
allow(User).to receive(:create).and_return @user
end
it 'creates session and redirects to web/users/tasks#index' do
expect(controller).to receive(:create_user_session!).with(@user)
expect(User).to receive(:find_by_email).with(@user.email).and_return(@user)
expect(@user).to receive(:authenticate).with(@user.password).and_return(@user)
post :create, params: { session: { email: @user.email, password: <PASSWORD> } }
expect(response).to redirect_to(user_tasks_path(@user))
expect(controller).to set_flash[:notice]
end
end
context 'when create failed' do
before do
@user = create :user
@user.password = '<PASSWORD>'
allow(User).to receive(:create).and_return @user
end
it 'renders #new' do
post :create, params: { session: { email: @user.email, password: <PASSWORD> } }
expect(response).to render_template(:new)
expect(controller).to set_flash[:error]
end
end
end
describe '#destroy' do
before do
user = create :user
params = {
email: user.email,
password: <PASSWORD>
}
post :create, params: { session: params }
end
it 'destroys user session' do
allow(controller).to receive(:destroy_user_session!)
delete :destroy
expect(response).to redirect_to(root_url)
end
end
end
<file_sep>class Task < ApplicationRecord
  # A user's task with a simple lifecycle: new -> started -> finished
  # (and back to new via reopen).
  STATES = %w(new started finished).freeze

  belongs_to :user

  validates :name, presence: true
  validates :state, inclusion: { in: STATES }

  # File attached to the task (CarrierWave uploader).
  mount_uploader :attachment, TaskAttachmentUploader

  scope :most_recent_first, -> { order(created_at: :desc) }

  # state_machine gem: events mirror the STATES list above.
  state_machine initial: :new do
    event :start do
      transition any => :started
    end
    event :finish do
      transition any => :finished
    end
    event :reopen do
      transition any => :new
    end
  end
end
<file_sep>require 'rails_helper'
RSpec.describe Task, type: :model do
it 'has a valid factory' do
expect(build :task).to be_valid
end
describe 'schema' do
it { should have_db_column(:name).with_options null: false }
it { should have_db_column(:description) }
it { should have_db_column(:state) }
it { should have_db_column(:created_at).with_options null: false }
end
describe 'relations' do
it { should belong_to :user }
end
describe 'validations' do
subject { build :task }
it { should validate_presence_of :name }
it { should validate_inclusion_of(:state).in_array(Task::STATES) }
end
end
<file_sep>module ApplicationHelper
def nav_link(title, path, link_options = {})
link_options = {
title: title
}.merge!(link_options)
content_tag(:li, class: (:active if current_page?(path))) do
link_to title, path, link_options
end
end
end
<file_sep>namespace :fake_task do
desc 'Creates one fake task for each user'
task :create => :environment do
ActiveRecord::Base.transaction do
User.all.each do |user|
Task.create(
user: user,
name: Faker::Lorem.word,
description: Faker::Lorem.sentence,
state: Task::STATES.sample
)
end
end
end
end
<file_sep>class Web::UsersController < Web::ApplicationController
def create
@user = User.create(params.require(:user).permit(:email, :password))
if @user.valid?
create_user_session!(@user)
redirect_to user_tasks_path(@user)
else
render :new
end
end
def new
@user = User.new
end
end
<file_sep>class Web::Users::TasksController < Web::ApplicationController
before_action :authenticate_user!, except: [:index]
def index
@tasks = Task.includes(:user).where(
user_id: params[:user_id].to_i
).most_recent_first
end
end
|
295b57a9eb4b64c6867d03f350e83afe076593cc
|
[
"Markdown",
"Ruby"
] | 24 |
Ruby
|
digaev/task_manager
|
4483e43c7aa5361b1582b9c9d9f63192fe19e5b8
|
abadd563e58bdcef578fd790fba852bdd889055c
|
refs/heads/master
|
<repo_name>smart-solution/ovizio<file_sep>/8.0/ovizio_custom_mig/ovizio_custom.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
##############################################################################
#
##############################################################################
from openerp import models, fields, api
from openerp.tools.translate import _
from datetime import datetime
from datetime import timedelta
class crm_lead(models.Model):
    """Extends CRM leads with demo-scheduling fields.

    Fixes over the previous revision:
      * ``fields.Boolena`` -> ``fields.Boolean`` (AttributeError at load time).
      * ``demo_state`` had a trailing comma, assigning a 1-tuple instead of a
        field; the default now lives on the field itself (new-API style).
    """
    _name = 'crm.lead'
    _inherit = 'crm.lead'

    need_demo = fields.Boolean('Needed for Demo')
    demo_date = fields.Date('Demo Date')
    demo_desc = fields.Text('Demo Description')
    demo_state = fields.Selection(
        [('pending', 'Pending'), ('inprogress', 'In Progress'),
         ('succesful', 'Succesful'), ('failed', 'Failed')],
        'Demo Status', default='pending')
class product_template(models.Model):
    """Adds supplier bookkeeping to product templates.

    Fixes over the previous revision:
      * inherited from ``osv.osv`` although ``osv`` was never imported
        (this file imports the new API: models/fields/api);
      * ``_inherit = 'product.tempalte'`` typo -> ``'product.template'``;
      * old-style ``_columns``/``fields.function`` rewritten as Odoo 8
        new-API fields (field names unchanged, so views/callers still work).
    """
    _inherit = 'product.template'

    @api.one
    @api.depends('seller_ids')
    def _get_main_supplier(self):
        # The main supplier is the first entry of the supplierinfo list.
        self.supplier_id = self.seller_ids[0].name.id if self.seller_ids else False

    supplier_id = fields.Many2one(
        'res.partner', compute='_get_main_supplier', store=True,
        string='Main Supplier')
    main_supplier_id = fields.Many2one('res.partner', 'Main Supplier')
    critical = fields.Boolean('Critical')
class mrp_bom(models.Model):
    """Adds cost/delay/supplier info to BoM lines.

    Fixes over the previous revision: ``fields.Function`` and
    ``fields.Related`` do not exist in the Odoo 8 new API (the old API used
    lowercase ``fields.function``/``fields.related`` inside ``_columns``).
    Rewritten as compute/related fields with the same field names.
    """
    _inherit = 'mrp.bom'

    @api.one
    @api.depends('product_id.standard_price', 'product_qty')
    def _cost_product_get(self):
        # Line cost = component cost price * quantity on the BoM line.
        self.product_cost = self.product_id.standard_price * self.product_qty

    product_cost = fields.Float(compute='_cost_product_get', string='Cost')
    product_delay = fields.Float(
        related='product_id.produce_delay', string='Delay')
    product_supplier = fields.Many2one(
        'res.partner', related='product_id.supplier_id',
        string='Supplier', store=True)
    product_supplier_id = fields.Many2one(
        'res.partner', related='product_id.main_supplier_id',
        string='Supplier', store=True)
#
#class sale_order(osv.osv):
#
# _inherit = "sale.order"
#
# _columns = {
# 'date_validity': fields.date('Validity Date'),
# }
#
#class sale_order_line(osv.osv):
#
# _inherit = 'sale.order.line'
#
# def _line_nbr_get(self, cr ,uid, ids, field_name, args, context=None):
# """get the line nunber"""
# if not context:
# context = {}
# result = {}
# l = []
# for line in self.browse(cr, uid, ids):
# l.append(line.id)
# l = sorted(l)
# for i in l:
# result[i] = l.index(i) + 1
# return result
#
# def _delivery_date_get(self, cr ,uid, ids, field_name, args, context=None):
# """get the line delivery date"""
# if not context:
# context = {}
# result = {}
# for line in self.browse(cr, uid, ids):
# if line.product_id:
# order_date = datetime.strptime(line.order_id.date_order, '%Y-%m-%d')
# delivery_date = order_date + timedelta(days=line.product_id.sale_delay)
# result[line.id] = delivery_date.strftime('%Y-%m-%d')
# return result
#
# _columns = {
# 'line_nbr': fields.function(_line_nbr_get, type="integer", string='Number', store=False, readonly=True),
# 'date_delivery': fields.function(_delivery_date_get, type="date", string='Delivery Date', store=False, readonly=True),
# }
#
#
#class res_partner(osv.osv):
#
# _inherit = 'res.partner'
#
# _columns = {
# 'critical': fields.boolean('Critical'),
# }
#
#
#class stock_move(osv.osv):
#
# _inherit = 'stock.move'
#
# _columns = {
# 'product_rack': fields.related('product_id', 'loc_rack', type='char', size=16, string='Product Rack', store=True),
# 'product_row': fields.related('product_id', 'loc_row', type='char', size=16, string='Product Row', store=True),
# 'product_supplier': fields.related('product_id', 'supplier_id', type='many2one', relation="res.partner", string='Product Supplier', store=True),
# }
#
#
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<file_sep>/8.0/ovizio_custom/report/__init__.py
import account_print_invoice
import bom_structure
<file_sep>/8.0/ovizio_custom/__init__.py
# -*- encoding: utf-8 -*-
##############################################################################
#
#
##############################################################################
import ovizio_custom
#import report_inventory_analysis
import report
import wizard
<file_sep>/scripts/ovizio_product_active.py
# -*- coding: utf-8 -*-
# One-shot XML-RPC maintenance script (Python 2): re-activate every product
# that still has a positive quantity on hand.
import time
import base64
import xmlrpclib
server='localhost'
dbname='ovizio'
uid=1
pwd='<PASSWORD>'
model = 'product.product'
#replace localhost with the address of the server
sock = xmlrpclib.ServerProxy('http://%s:8069/xmlrpc/object'%(server))
# All product ids (empty search domain = every record).
ids = sock.execute(dbname, uid, pwd, 'product.product', 'search', [])
a_datas = sock.execute(dbname, uid, pwd, 'product.product', 'read', ids, ['qty_available'])
#print "A_DATAS:",a_datas
# Collect (and echo) the products that still have stock on hand.
to_active = []
for a_data in a_datas:
    if a_data['qty_available'] > 0:
        to_active.append(a_data['id'])
        print a_data
# Re-activate them all in a single write call.
sock.execute(dbname, uid, pwd, 'product.product', 'write', to_active, {'active':True})
<file_sep>/8.0/mrp_production_order_report_purchase_info/mrp_production_order_report_purchase_info.py
# -*- coding: utf-8 -*-
##############################################################################
#
# Smart Solution bvba
# Copyright (C) 2010-Today Smart Solution BVBA (<http://www.smartsolution.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
class stock_move(osv.Model):
    # Extends stock.move with a back-link to the purchase order created by
    # procurement (filled in by procurement.order.create_procurement_purchase_order).
    _inherit = "stock.move"
    _columns = {
        'purchase_id': fields.many2one('purchase.order', 'Purchase Order'),
    }
class procurement_order(osv.osv):
    _inherit = "procurement.order"

    def create_procurement_purchase_order(self, cr, uid, procurement, po_vals, line_vals, context=None):
        """Delegate the purchase-order creation to the parent class, then tag
        the procurement's stock move (and its destination move, if any) with
        the id of the purchase order that was just created."""
        purchase_id = super(procurement_order, self).create_procurement_purchase_order(
            cr, uid, procurement, po_vals, line_vals, context=context)
        move = procurement.move_id
        if move:
            move_model = self.pool.get('stock.move')
            move_model.write(cr, uid, [move.id], {'purchase_id': purchase_id})
            dest = move.move_dest_id
            if dest:
                move_model.write(cr, uid, [dest.id], {'purchase_id': purchase_id})
        return purchase_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<file_sep>/scripts/ovizio_main_supplier.py
# -*- coding: utf-8 -*-
# One-shot XML-RPC maintenance script (Python 2): copy each supplierinfo's
# partner into the related product's main_supplier_id field.
import time
import base64
import xmlrpclib
server='localhost'
dbname='ovizio'
uid=1
pwd='<PASSWORD>'
model = 'product.product'
#replace localhost with the address of the server
sock = xmlrpclib.ServerProxy('http://%s:8069/xmlrpc/object'%(server))
ids = sock.execute(dbname, uid, pwd, 'product.supplierinfo', 'search', [])
# 'name' is the supplier partner, 'product_id' the related product; both are
# returned over XML-RPC as [id, display_name] pairs, hence the [0] below.
a_datas = sock.execute(dbname, uid, pwd, 'product.supplierinfo', 'read', ids, ['name','product_id'])
print "A_DATAS:",a_datas
for a_data in a_datas:
    sock.execute(dbname, uid, pwd, 'product.product', 'write', [a_data['product_id'][0]], {'main_supplier_id':a_data['name'][0]})
<file_sep>/8.0/ovizio_custom/report_inventory_analysis.py
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from osv import fields,osv
from decimal_precision import decimal_precision as dp
class ovizio_report_stock_inventory(osv.osv):
    """Ovizio stock statistics: a read-only reporting model backed by a SQL
    view (``_auto = False`` — no table is created by the ORM).

    Each stock move contributes two rows to the view: a negative quantity /
    value on its source location (row id = move id) and a positive one on its
    destination location (row id = -move id).
    """
    _name = "ovizio.report.stock.inventory"
    _description = "Ovizio Stock Statistics"
    # Backed by the SQL view created in init(), not an ORM-managed table.
    _auto = False
    _columns = {
        'date': fields.datetime('Date', readonly=True),
        'partner_id':fields.many2one('res.partner.address', 'Partner', readonly=True),
        'supplier_id':fields.many2one('res.partner', 'Supplier', readonly=True),
        'product_id':fields.many2one('product.product', 'Product', readonly=True),
        'product_categ_id':fields.many2one('product.category', 'Product Category', readonly=True),
        'location_id': fields.many2one('stock.location', 'Location', readonly=True),
        'prodlot_id': fields.many2one('stock.production.lot', 'Lot', readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'product_qty':fields.float('Quantity', digits_compute=dp.get_precision('Product UoM'), readonly=True),
        'price_unit' : fields.float('Unit Price', digits_compute=dp.get_precision('Account'), required=True),
        'value' : fields.float('Total Value', digits_compute=dp.get_precision('Account'), required=True),
        'state': fields.selection([('draft', 'Draft'), ('waiting', 'Waiting'), ('confirmed', 'Confirmed'), ('assigned', 'Available'), ('done', 'Done'), ('cancel', 'Cancelled')], 'State', readonly=True, select=True,
              help='When the stock move is created it is in the \'Draft\' state.\n After that it is set to \'Confirmed\' state.\n If stock is available state is set to \'Avaiable\'.\n When the picking it done the state is \'Done\'.\
                      \nThe state is \'Waiting\' if the move is waiting for another one.'),
        'location_type': fields.selection([('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location for Inter-Companies Transfers')], 'Location Type', required=True),
    }
    def init(self, cr):
        # (Re)build the backing view at module install/update time.
        # First half of the UNION: moves leaving location_id (negative qty),
        # second half: moves entering location_dest_id (positive qty, id negated
        # so both rows of one move stay unique).
        tools.drop_view_if_exists(cr, 'ovizio_report_stock_inventory')
        cr.execute("""
CREATE OR REPLACE view ovizio_report_stock_inventory AS (
    (SELECT
        min(m.id) as id, m.date as date,
        m.address_id as partner_id, m.location_id as location_id,
        m.product_id as product_id, pt.categ_id as product_categ_id, l.usage as location_type,
        m.company_id, m.price_unit as price_unit, m.partner_id as supplier_id,
        m.state as state, m.prodlot_id as prodlot_id,
        coalesce(sum(-m.price_unit * m.product_qty)::decimal, 0.0) as value,
        CASE when pt.uom_id = m.product_uom
        THEN
        coalesce(sum(-m.product_qty)::decimal, 0.0)
        ELSE
        coalesce(sum(-m.product_qty * pu.factor)::decimal, 0.0) END as product_qty
    FROM
        stock_move m
            LEFT JOIN stock_picking p ON (m.picking_id=p.id)
            LEFT JOIN product_product pp ON (m.product_id=pp.id)
                LEFT JOIN product_template pt ON (pp.product_tmpl_id=pt.id)
                LEFT JOIN product_uom pu ON (pt.uom_id=pu.id)
                LEFT JOIN product_uom u ON (m.product_uom=u.id)
            LEFT JOIN stock_location l ON (m.location_id=l.id)
    GROUP BY
        m.id, m.product_id, m.product_uom, m.price_unit, pt.categ_id, m.partner_id, m.address_id, m.location_id, m.location_dest_id,
        m.prodlot_id, m.date, m.state, l.usage, m.company_id,pt.uom_id
) UNION ALL (
    SELECT
        -m.id as id, m.date as date,
        m.address_id as partner_id, m.location_dest_id as location_id,
        m.product_id as product_id, pt.categ_id as product_categ_id, l.usage as location_type,
        m.company_id, m.price_unit as price_unit, m.partner_id as supplier_id,
        m.state as state, m.prodlot_id as prodlot_id,
        coalesce(sum(m.price_unit * m.product_qty )::decimal, 0.0) as value,
        CASE when pt.uom_id = m.product_uom
        THEN
        coalesce(sum(m.product_qty)::decimal, 0.0)
        ELSE
        coalesce(sum(m.product_qty * pu.factor)::decimal, 0.0) END as product_qty
    FROM
        stock_move m
            LEFT JOIN stock_picking p ON (m.picking_id=p.id)
            LEFT JOIN product_product pp ON (m.product_id=pp.id)
                LEFT JOIN product_template pt ON (pp.product_tmpl_id=pt.id)
                LEFT JOIN product_uom pu ON (pt.uom_id=pu.id)
                LEFT JOIN product_uom u ON (m.product_uom=u.id)
            LEFT JOIN stock_location l ON (m.location_dest_id=l.id)
    GROUP BY
        m.id, m.product_id, m.product_uom, m.price_unit, pt.categ_id, m.partner_id, m.address_id, m.location_id, m.location_dest_id,
        m.prodlot_id, m.date, m.state, l.usage, m.company_id,pt.uom_id
    )
);
        """)
# Old-API instantiation: registers the model in the OpenERP pool.
ovizio_report_stock_inventory()
<file_sep>/README.md
# ovizio
Ovizio devs
<file_sep>/7.0/ovizio_custom/wizard/mrp_stock_move.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
##############################################################################
#
##############################################################################
from osv import osv, fields
from openerp.tools.translate import _
class wizard_mrp_stock_move(osv.osv_memory):
    _name = 'wizard.mrp.stock.move'

    def stock_move_get(self, cr, uid, ids, context=None):
        """Get products to consume stock moves for a manufacturing order"""
        data_model = self.pool.get('ir.model.data')
        production = self.pool.get('mrp.production').browse(cr, uid, context['active_id'])
        move_ids = [move.id for move in production.move_lines]

        def _stock_view_id(xml_id):
            # Resolve a view of the stock module by XML id; False when missing.
            try:
                return data_model.get_object_reference(cr, uid, 'stock', xml_id)[1]
            except ValueError:
                return False

        # Window action restricted to the production order's consume moves.
        return {'name': _('Products to Consume'),
                'context': context,
                'view_type': 'form',
                'view_mode': 'tree,form',
                'res_model': 'stock.move',
                'views': [(_stock_view_id('view_move_tree'), 'tree'), (_stock_view_id('view_move_form'), 'form')],
                'type': 'ir.actions.act_window',
                'domain': [('id', 'in', move_ids)]
                }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
e9050548a147bac2efaa83148f9c1c42bcb83423
|
[
"Markdown",
"Python"
] | 9 |
Python
|
smart-solution/ovizio
|
df56b51336dbeafa9eb131f3931fa895d7e61bd3
|
b87d089c8a0460cdd4203510d655ec86d3e1c8ee
|
refs/heads/master
|
<repo_name>KeitaMoromizato/tf-idf-test<file_sep>/src/app.js
"use strict";
import mysql from 'mysql';
import _, {find, sortBy} from 'lodash';
import getTfFromArticle from './getTfFromArticle';
// MySQL connection, configured entirely from environment variables.
const connection = mysql.createConnection({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  // Fix: the credential lookup had been mangled into a redaction placeholder.
  password: process.env.DB_PASSWORD,
  database: process.env.DB_DATABASE
});

connection.connect();

// Id of the article that every other article is compared against.
const targetId = 113;

// One row per article, joined with its latest markdown body.
// NOTE(review): `body` is not aggregated in the GROUP BY subquery, so MySQL
// may return an arbitrary revision's body rather than the MAX(createdAt)
// one — confirm against the schema / sql_mode.
const query = `
SELECT id, MA.body as body, title FROM articles
LEFT JOIN (
SELECT article_id as article, body, MAX(createdAt) FROM markdown_articles
GROUP BY article
) MA ON MA.article = articles.id;
`;
// Dot-product similarity of two sparse tf-idf vectors: for every term of
// `base`, multiply its weight by the matching term's weight in `target`
// (0 when the term is absent there) and sum the products.
const calcRelation = function(base, target) {
  let score = 0.0;
  for (const entry of base) {
    const match = target.find(item => item.word === entry.word);
    score += entry.tfidf * (match ? match.tfidf : 0);
  }
  return score;
};
// Inverse document frequency of `word` over all articles: log(N / df) + 1.
// NOTE(review): `df` here sums tf values across documents instead of counting
// documents containing the word — behavior kept as-is; confirm this is the
// intended df definition.
const getIdf = function(results, word) {
  let df = 0;
  for (const doc of results) {
    for (const entry of doc.words) {
      if (entry.word === word) df += entry.tf;
    }
  }
  return Math.log(results.length / df) + 1;
};
// Fetch every article, compute its tf-idf vector, then rank all articles by
// similarity to the target article and print the ranking.
connection.query(query, (error, rows, fields) => {
  if (error) return console.error(error);
  // One term-frequency promise per article ({id, words: [{word, tf}]}).
  const promises = rows.map(row => getTfFromArticle(row));
  Promise.all(promises).then(results => {
    // Per article: tf-idf weight for each of its terms, sorted ascending.
    const TFIDFs = results.map(target => {
      const tfidf = target.words.map(word => Object({word: word.word, tfidf: word.tf * getIdf(results, word.word)}));
      return {id: target.id, tfidf: sortBy(tfidf, e => e.tfidf)};
    });
    // Similarity of every article (including itself) to the target article,
    // sorted ascending by relation score.
    const target = find(TFIDFs, target => target.id === targetId);
    const result = _(TFIDFs).chain()
      .map(e => Object({id: e.id, title: find(rows, r => r.id === e.id).title, relation: calcRelation(target.tfidf, e.tfidf)}))
      .sortBy(e => e.relation)
      .value();
    console.log("result", result);
  }).catch(error => {
    console.error(error);
  });
});
|
a9e944360e388fc8b18e29779a16d001180fdc55
|
[
"JavaScript"
] | 1 |
JavaScript
|
KeitaMoromizato/tf-idf-test
|
7b1dba093c73ff1c70ee8d9abf8c001024f0c1ec
|
e8eb4d87e3c93ea7e62f6c6df63240de7675db66
|
refs/heads/master
|
<repo_name>VeriVD/qgis_VeriVD<file_sep>/verivd/core/plugin_info.py
from qgis.core import Qgis, QgsMessageLog
DEBUG = True
DEBUG_KEEP_LAYER = False
def info(msg, level=Qgis.Info):
    """Push *msg* (stringified) to the QGIS message log, "VeriVD" tab."""
    text = str(msg)
    QgsMessageLog.logMessage(text, "VeriVD", level)
def dbg_info(msg: str):
    """Forward *msg* to :func:`info` only while the module DEBUG flag is on."""
    if not DEBUG:
        return
    info(msg)
<file_sep>/verivd/core/models/checker_model.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.core import QgsProperty, QgsSymbolLayer, QgsVectorLayer, QgsWkbTypes
from qgis.gui import QgisInterface
from verivd.core.gpkg_data import MARKER_SHAPE
from verivd.core.layer_info import LayerInfo
from verivd.core.layer_list_model import LayerListModel
from verivd.core.symbology_type import SymbologyType
from verivd.core.topic_layers import TOPIC_LAYERS
from verivd.core.veri_meta_layer import VeriMetaLayer
class CheckerLayerModel(LayerListModel):
    """List model exposing one meta layer per checker topic found in the
    GeoPackage summary table ``000_checker_decompte``."""

    def __init__(self, iface: QgisInterface):
        super().__init__(iface)

    def reload(self):
        """Rebuild the meta-layer list from the checker summary table."""
        self.beginResetModel()
        self._veri_meta_layers = []
        if not self.gpkg_data:
            # Fix: beginResetModel() must always be paired with endResetModel(),
            # otherwise attached views would be left in a reset state.
            self.endResetModel()
            return
        checker_dict = self.gpkg_data.load_table_list("000_checker_decompte")
        for topic in TOPIC_LAYERS:
            if topic in checker_dict:
                display_name = f"Checker - {topic}: {str(checker_dict[topic])}"
                self._veri_meta_layers.append(VeriMetaLayer(topic, display_name))
        self.endResetModel()

    def group_name(self, layer):
        """Legend group title for *layer* (a checker topic name)."""
        return f"Résultat du checker - {layer}"

    def layer_infos(self, layer: str) -> [LayerInfo]:
        """Return the LayerInfo sequence for the checker topic *layer*:
        justificatif layers, checker result layers and the lot perimeter,
        each filtered on the topic."""
        sql_request = f"\"topic\" = '{layer}'"
        layer_infos = (
            LayerInfo(
                display_name=f"Justificatifs - {layer} point",
                layer_name="justificatif_point",
                sql_request=f"\"layer\" = '000_checker_point' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} ligne",
                layer_name="justificatif_line",
                # les lignes sont dans la couche _surface
                sql_request=f"\"layer\" = '000_checker_surface' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} surface",
                layer_name="justificatif_polygon",
                sql_request=f"\"layer\" = '000_checker_surface' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} sans géométrie",
                layer_name="justificatif_nogeometry",
                sql_request=f"\"layer\" = '000_checker_sans_geometrie' AND {sql_request}",
                symbology_type=SymbologyType.NO_SYMBOL,
            ),
            LayerInfo(
                display_name=f"Checker - {layer} point",
                layer_name="000_Checker_Point",
                sql_request=sql_request,
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="description",
                symbology_data_defined_properties={QgsSymbolLayer.PropertySize: QgsProperty.fromValue(5)},
            ),
            LayerInfo(
                display_name=f"Checker - {layer} surface",
                layer_name="000_Checker_Surface",
                sql_request=sql_request,
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="description",
                symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(2)},
                opacity=0.5,
            ),
            LayerInfo(
                display_name=f"Checker - {layer} sans géométrie",
                layer_name="000_Checker_Sans_geometrie",
                sql_request=sql_request,
                symbology_type=SymbologyType.NO_SYMBOL,
            ),
            LayerInfo(
                display_name="Périmetre du lot",
                layer_name="112_itf_mise_a_jourrp",
            ),
        )
        return layer_infos

    def post_process_layer(self, layer: QgsVectorLayer, position: int):
        """Give every point layer a marker shape cycled from MARKER_SHAPE.

        NOTE(review): the modulo uses len - 1, so the last shape of
        MARKER_SHAPE is never selected — confirm whether intentional.
        """
        if layer.geometryType() == QgsWkbTypes.PointGeometry:
            for symbol in layer.renderer().symbols(self.layer_context(layer)):
                symbol.symbolLayer(0).setShape(MARKER_SHAPE[position % (len(MARKER_SHAPE) - 1)])
<file_sep>/verivd/core/models/base_model.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.gui import QgisInterface
from verivd.core.layer_info import LayerInfo
from verivd.core.layer_list_model import LayerListModel
from verivd.core.symbology_type import SymbologyType
from verivd.core.veri_meta_layer import VeriMetaLayer
# Static catalogue of the "base" meta layers: for each meta layer, the ordered
# list of GeoPackage layers (with optional filter / symbology settings) to load.
BASE_LAYER_INFOS = {
    "Base-Biens_fonds": [
        LayerInfo(display_name="DDP Numéro", layer_name="006_ITF_BF_Pos_DDP"),
        LayerInfo(display_name="Bien-fonds Numéro", layer_name="006_ITF_BF_Pos_Bien_fonds"),
        LayerInfo(display_name="Point limite", layer_name="006_ITF_BF_Point_limite"),
        LayerInfo(display_name="DDP", layer_name="006_ITF_BF_DDP"),
        LayerInfo(display_name="Bien-fonds", layer_name="006_ITF_BF_Bien_fonds"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Couverture_du_sol": [
        LayerInfo(display_name="CS Nom et numéro", layer_name="002_ITF_CS_Pos_Surface_CS"),
        LayerInfo(display_name="Point particulier CS", layer_name="002_ITF_CS_Point_particulier"),
        LayerInfo(display_name="Surface CS", layer_name="002_ITF_CS_Surface_CS"),
        LayerInfo(
            display_name="Bâtiment",
            layer_name="002_ITF_CS_Surface_CS",
            sql_request='"type" = "batiment"',
            symbology_type=SymbologyType.SIMPLE,
            symbology_properties={
                "color": "255, 210, 210",
                "border_color": "black",
            },
        ),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Objets_divers": [
        LayerInfo(display_name="OD linéaire", layer_name="003_ITF_OD_Element_lineaire"),
        LayerInfo(display_name="OD surfacique", layer_name="003_ITF_OD_Element_surfacique"),
        LayerInfo(display_name="Point particulier OD", layer_name="003_ITF_OD_Point_particulier"),
        LayerInfo(display_name="OD linéaire Nom et Numéro", layer_name="003_ITF_OD_Pos_Element_lineaire"),
        LayerInfo(display_name="OD surf Nom et Numéro", layer_name="003_ITF_OD_Pos_Element_surfacique"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Nomenclature": [
        LayerInfo(display_name="Lieux dits texte", layer_name="005_itf_no_pos_lieudit"),
        LayerInfo(display_name="Nom local texte", layer_name="005_itf_no_posnom_local"),
        LayerInfo(
            display_name="Nom local",
            layer_name="005_ITF_NO_Nom_local",
            symbology_type=SymbologyType.RANDOM_CATEGORIZED,
            category_field="name",
        ),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Conduite": [
        LayerInfo(display_name="CO Conduite", layer_name="007_itf_co_element_conduite"),
        LayerInfo(display_name="CO Conduite Nom", layer_name="007_itf_co_poselement_conduite"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Points_fixes": [
        LayerInfo(display_name="PFP-PFA3", layer_name="001_itf_pf_points_fixes"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Altimetrie": [
        LayerInfo(display_name="Courbes de niveau", layer_name="004_itf_al_courbes_de_niveau"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Repartition_des_plans": [
        LayerInfo(display_name="Plan", layer_name="105_itf_rp"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Adresses_des_batiments": [
        LayerInfo(display_name="Numéro d'entrée", layer_name="009_itf_bat_posentree_batiment"),
        LayerInfo(display_name="Nom de localisation", layer_name="009_itf_bat_posnom_localisation"),
        LayerInfo(display_name="Point de départ des tronçons", layer_name="009_itf_bat_point_depart"),
        LayerInfo(display_name="Entrée de bâtiment", layer_name="009_itf_bat_entree_batiment"),
        LayerInfo(display_name="Tronçon de rue", layer_name="009_itf_bat_troncon_rue"),
        LayerInfo(display_name="Lieu dénommé", layer_name="009_itf_bat_lieu_denomme"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Limites_territoriales": [
        LayerInfo(display_name="Point limite territorial", layer_name="008_itf_lt_point_limite_ter"),
        LayerInfo(display_name="Limites territoriales", layer_name="008_itf_lt_autre_limite"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
    "Base-Tous_les_topics": [
        LayerInfo(display_name="CS Nom et numéro", layer_name="002_ITF_CS_Pos_Surface_CS"),
        LayerInfo(display_name="DDP Numéro", layer_name="006_ITF_BF_Pos_DDP"),
        LayerInfo(display_name="Bien-fonds Numéro", layer_name="006_ITF_BF_Pos_Bien_fonds"),
        LayerInfo(display_name="CO Conduite Nom", layer_name="007_itf_co_poselement_conduite"),
        LayerInfo(display_name="Numéro d'entrée", layer_name="009_itf_bat_posentree_batiment"),
        LayerInfo(display_name="OD lin Nom et Numéro", layer_name="003_ITF_OD_Pos_Element_lineaire"),
        LayerInfo(display_name="OD surf Nom et Numéro", layer_name="003_ITF_OD_Pos_Element_surfacique"),
        LayerInfo(display_name="Nom de localisation", layer_name="009_itf_bat_posnom_localisation"),
        LayerInfo(display_name="Lieux dits texte", layer_name="005_itf_no_pos_lieudit"),
        LayerInfo(display_name="Nom local texte", layer_name="005_itf_no_posnom_local"),
        LayerInfo(
            display_name="Nom local",
            layer_name="005_ITF_NO_Nom_local",
            symbology_type=SymbologyType.RANDOM_CATEGORIZED,
            category_field="name",
            opacity=0.5,
            visibility=False,
        ),
        LayerInfo(display_name="PFP-PFA3", layer_name="001_itf_pf_points_fixes"),
        LayerInfo(display_name="Point limite", layer_name="006_ITF_BF_Point_limite"),
        LayerInfo(
            display_name="Point particulier CS",
            layer_name="002_ITF_CS_Point_particulier",
        ),
        LayerInfo(
            display_name="Point limite territorial",
            layer_name="008_itf_lt_point_limite_ter",
        ),
        LayerInfo(display_name="OD linéaire", layer_name="003_ITF_OD_Element_lineaire"),
        LayerInfo(
            display_name="OD surfacique",
            layer_name="003_ITF_OD_Element_surfacique",
        ),
        LayerInfo(
            display_name="Point particulier OD",
            layer_name="003_ITF_OD_Point_particulier",
        ),
        LayerInfo(
            display_name="Point de départ des tronçons",
            layer_name="009_itf_bat_point_depart",
            visibility=False,
        ),
        LayerInfo(
            display_name="Entrée de bâtiment",
            layer_name="009_itf_bat_entree_batiment",
            visibility=False,
        ),
        LayerInfo(
            display_name="Tronçon de rue",
            layer_name="009_itf_bat_troncon_rue",
            visibility=False,
        ),
        LayerInfo(
            # Fix: the display name had been replaced by a "<NAME>" redaction
            # placeholder; restored from the identical layer entry in
            # "Base-Adresses_des_batiments".
            display_name="Lieu dénommé",
            layer_name="009_itf_bat_lieu_denomme",
            visibility=False,
        ),
        LayerInfo(display_name="CO Conduite", layer_name="007_itf_co_element_conduite"),
        LayerInfo(
            display_name="Courbes de niveau",
            layer_name="004_itf_al_courbes_de_niveau",
            visibility=False,
        ),
        LayerInfo(display_name="Plan", layer_name="105_itf_rp"),
        LayerInfo(display_name="Limites territoriales", layer_name="008_itf_lt_autre_limite"),
        LayerInfo(display_name="DDP", layer_name="006_ITF_BF_DDP"),
        LayerInfo(display_name="Bien-fonds", layer_name="006_ITF_BF_Bien_fonds"),
        LayerInfo(display_name="Surface CS", layer_name="002_ITF_CS_Surface_CS"),
        LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
    ],
}
# adding justificatifs layers
# For every meta layer, prepend the four generic "justificatif" layers
# (point / line / polygon / no-geometry), each filtered on the lower-cased
# names of the meta layer's own layers.
for veri_vd_layer, layer_infos in BASE_LAYER_INFOS.items():
    # Quoted, comma-separated list of layer names for the SQL IN clause.
    layer_names = ", ".join([f"'{li.layer_name.lower()}'" for li in layer_infos])
    BASE_LAYER_INFOS[veri_vd_layer] = [
        LayerInfo(
            display_name="Justificatifs - point",
            layer_name="justificatif_point",
            sql_request=f'lower("layer") in ({layer_names})',
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - ligne",
            layer_name="justificatif_line",
            sql_request=f'lower("layer") in ({layer_names})',
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - surface",
            layer_name="justificatif_polygon",
            sql_request=f'lower("layer") in ({layer_names})',
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - sans géométrie",
            layer_name="justificatif_nogeometry",
            sql_request=f'lower("layer") in ({layer_names})',
            symbology_type=SymbologyType.NO_SYMBOL,
        ),
    ] + layer_infos
def create_veri_meta_layers():
    """Return the ordered tuple of base-topic VeriMetaLayer entries."""
    entries = (
        ("Base-Tous_les_topics", "Base - Tous les topics"),
        ("Base-Points_fixes", "Base - Points fixes"),
        ("Base-Couverture_du_sol", "Base - Couverture du sol"),
        ("Base-Objets_divers", "Base - Objets divers"),
        ("Base-Altimetrie", "Base - Altimétrie"),
        ("Base-Nomenclature", "Base - Nomenclature"),
        ("Base-Biens_fonds", "Base - Biens fonds"),
        ("Base-Conduite", "Base - Conduite"),
        ("Base-Limites_territoriales", "Base - Limites territoriales"),
        ("Base-Adresses_des_batiments", "Base - Adresses des bâtiments"),
        ("Base-Repartition_des_plans", "Base - Répartition des plans"),
    )
    return tuple(VeriMetaLayer(name, label) for name, label in entries)
class BaseLayerModel(LayerListModel):
    """List model exposing the static base (non-checker) meta layers."""

    def __init__(self, iface: QgisInterface):
        super().__init__(iface, create_veri_meta_layers())

    def layer_infos(self, layer: str) -> [LayerInfo]:
        """Return the static LayerInfo list for meta layer *layer*."""
        return BASE_LAYER_INFOS[layer]

    def reload(self):
        # Reset the meta-layer list to the static catalogue.
        # NOTE(review): writes `veri_meta_layers` (no underscore) while the
        # checker model uses `_veri_meta_layers` — presumably a property on
        # LayerListModel; confirm in layer_list_model.py.
        self.veri_meta_layers = create_veri_meta_layers()
<file_sep>/verivd/core/gpkg_data.py
#!/usr/bin/env python
""" A simple class used to load a layer in QGIS """
# Some commonly used imports
import os.path
from random import randrange
from typing import Dict
from qgis.core import (
QgsCategorizedSymbolRenderer,
QgsCoordinateReferenceSystem,
QgsExpressionContextUtils,
QgsFillSymbol,
QgsLineSymbol,
QgsMarkerSymbol,
QgsPropertyCollection,
QgsRenderContext,
QgsRendererCategory,
QgsSimpleFillSymbolLayer,
QgsSimpleMarkerSymbolLayerBase,
QgsSymbol,
QgsVectorLayer,
QgsWkbTypes,
)
from qgis.gui import QgisInterface
from qgis.PyQt.QtGui import QColor
from verivd.core.layer_info import LayerInfo
from verivd.core.plugin_info import DEBUG, DEBUG_KEEP_LAYER, dbg_info
from verivd.core.symbology_type import SymbologyType
# Ordered palette of point-marker shapes; consumers index into it (modulo)
# to give each generated point layer a visually distinct marker.
MARKER_SHAPE = (
    QgsSimpleMarkerSymbolLayerBase.Square,
    QgsSimpleMarkerSymbolLayerBase.Diamond,
    QgsSimpleMarkerSymbolLayerBase.Pentagon,
    QgsSimpleMarkerSymbolLayerBase.Triangle,
    QgsSimpleMarkerSymbolLayerBase.EquilateralTriangle,
    QgsSimpleMarkerSymbolLayerBase.Star,
    QgsSimpleMarkerSymbolLayerBase.Arrow,
    QgsSimpleMarkerSymbolLayerBase.Circle,
    QgsSimpleMarkerSymbolLayerBase.ArrowHeadFilled,
)
class GpkgData:
"""This class Contain generic attributes and methods"""
def __init__(self, iface: QgisInterface, gpkg_path: str):
self.iface = iface
self.path = gpkg_path
# get the path to your plugin directory
self.plugin_path = os.path.dirname(__file__)
self.symbols = None
self.properties = None
self.layers = []
def unique_field_finder(self, layer: QgsVectorLayer, field: str):
fni = layer.fields().indexFromName(field)
return layer.dataProvider().uniqueValues(fni)
def create_simple_symbol(self, layer: QgsVectorLayer, properties: dict):
simple_symbol = None
if layer.geometryType() == QgsWkbTypes.PointGeometry:
simple_symbol = QgsMarkerSymbol.createSimple(properties)
elif layer.geometryType() == QgsWkbTypes.LineGeometry:
simple_symbol = QgsLineSymbol.createSimple(properties)
elif layer.geometryType() == QgsWkbTypes.PolygonGeometry:
simple_symbol = QgsFillSymbol.createSimple(properties)
if layer.renderer():
layer.renderer().setSymbol(simple_symbol)
def change_properties(self, layer: QgsVectorLayer, properties: QgsPropertyCollection):
if layer.renderer() is not None:
# TODO check context
context = QgsRenderContext.fromMapSettings(self.iface.mapCanvas().mapSettings())
context.expressionContext().appendScope(QgsExpressionContextUtils.layerScope(layer))
for symbol in layer.renderer().symbols(context):
symbol.symbolLayer(0).setDataDefinedProperties(properties)
layer.triggerRepaint()
def create_simple_fill_symbol_layer(self, layer: QgsVectorLayer, fill_color: QColor):
# initialize the default symbol for this geometry type
symbol = QgsSymbol.defaultSymbol(layer.geometryType())
# configure a symbol layer
symbol_layer = QgsSimpleFillSymbolLayer(fill_color)
# replace default symbol layer with the configured one
if symbol_layer is not None:
symbol.changeSymbolLayer(0, symbol_layer)
return symbol
def random_cat_symb(self, layer: QgsVectorLayer, field: str, properties: dict):
dbg_info(f'create categorized symbol for field "{field}"')
categories = []
for unique_value in self.unique_field_finder(layer, field):
dbg_info(f" value: {unique_value}")
rand_color = QColor(randrange(0, 256), randrange(0, 256), randrange(0, 256))
symbol = self.create_simple_fill_symbol_layer(layer, fill_color=rand_color)
category = QgsRendererCategory(unique_value, symbol, unique_value)
# TODO: removed encode('Latin-1'), is this causing troubles?
categories.append(category)
renderer = QgsCategorizedSymbolRenderer(field, categories)
if renderer is not None:
layer.setRenderer(renderer)
property_collection = QgsPropertyCollection()
for key, value in properties.items():
property_collection.setProperty(key, value)
self.change_properties(layer, property_collection)
def load_table_list(self, data_source):
topic_field_index = 1
decompte_field_index = 2
layer = self.create_qgis_layer("", data_source)
list_feat_dict = {}
if layer.isValid():
features = layer.getFeatures()
for ft in features:
list_feat_dict[ft[topic_field_index]] = ft[decompte_field_index]
return list_feat_dict
def qml_definition(self, meta_layer_name: str, layer_info: LayerInfo):
qml_spec_file = os.path.join(
self.plugin_path,
"../qml",
f"{meta_layer_name}_{layer_info.layer_name}.qml",
)
qml_gen_file = os.path.join(self.plugin_path, "../qml", f"{layer_info.layer_name}.qml")
# Check if a specific qml file exist for this layer
# if not, check if a generic qml file exist
if os.path.isfile(qml_spec_file):
return qml_spec_file
elif os.path.isfile(qml_gen_file):
return qml_gen_file
dbg_info(f"{qml_spec_file} {qml_gen_file}")
return None
def create_qgis_layer(self, display_name, layer_name, sql_request=None, custom_properties={}):
dbg_info(f"creating QGIS layer {layer_name}")
uri = f"{self.path}|layername={layer_name}"
if sql_request:
uri += f"|subset={sql_request}"
# construct the layer
layer = QgsVectorLayer(uri, display_name, "ogr")
if layer.isSpatial():
layer.setCrs(QgsCoordinateReferenceSystem.fromEpsgId(2056))
for key, value in custom_properties.items():
layer.setCustomProperty(key, value)
return layer
    def create_qgis_layers(self, meta_layer_name: str, layer_infos: [LayerInfo]) -> Dict[LayerInfo, QgsVectorLayer]:
        """Create one QGIS layer per LayerInfo and apply its symbology.

        :param meta_layer_name: name of the meta layer, used to resolve
            meta-specific QML style files
        :param layer_infos: descriptions of the layers to create
        :return: mapping of each LayerInfo to its QgsVectorLayer, or to None
            when the layer is invalid or has no feature (unless the DEBUG
            flags force keeping it)
        """
        layers = {}
        for layer_info in layer_infos:
            dbg_info(f"Loading layer {layer_info.layer_name}")
            layer = self.create_qgis_layer(layer_info.display_name, layer_info.layer_name, sql_request=layer_info.sql_request)
            # Set the path to the layer's qml file. The qml file must be name at least with the layer name
            # Only keep valid, non-empty layers (or everything when debugging).
            if (layer.isValid() and layer.featureCount() != 0) or (DEBUG is True and DEBUG_KEEP_LAYER is True):
                if layer_info.symbology_type == SymbologyType.QML:
                    qml_file = self.qml_definition(meta_layer_name, layer_info)
                    if qml_file:
                        layer.loadNamedStyle(qml_file)
                    else:
                        # No QML file resolved (qml_file is None here): warn the user.
                        self.iface.messageBar().pushWarning(
                            "VeriVD - fichier QML manquant",
                            f"{layer_info.display_name} ({layer_info.layer_name}): {qml_file}",
                        )
                elif layer_info.symbology_type == SymbologyType.RANDOM_CATEGORIZED:
                    # Categorized renderer: one random color per distinct value
                    # of the category field.
                    self.random_cat_symb(
                        layer,
                        layer_info.category_field,
                        layer_info.symbology_data_defined_properties,
                    )
                elif layer_info.symbology_type == SymbologyType.SIMPLE:
                    self.create_simple_symbol(layer, layer_info.symbology_properties)
                if layer_info.opacity != 1:
                    layer.setOpacity(layer_info.opacity)
                # layer.setSubsetString(layer_info.sql_request)
                layers[layer_info] = layer
            else:
                # Record None so callers can report the missing/empty layer.
                layers[layer_info] = None
        return layers
<file_sep>/verivd/gui/veri_vd_plugin.py
"""
/***************************************************************************
VeriVD
A QGIS plugin
Vérification des mensurations vaudoises
-------------------
begin : 2018-11-15
git sha : $Format:%H$
copyright : (C) 2018 by FSN/OIT
email : <EMAIL>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os.path
from qgis.gui import QgisInterface
from qgis.PyQt.QtCore import QCoreApplication, QLocale, QSettings, Qt, QTranslator
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QAction, QMessageBox
from verivd.core.gpkg_data import GpkgData
from verivd.core.justificatif import HAS_JUSTIFICATIF
from verivd.core.layer_info import LayerInfo
from verivd.core.layer_list_model import LayerListModel
from verivd.core.layer_models import LayerModels
# Initialize layers
from verivd.core.plugin_info import DEBUG
from verivd.gui.veri_vd_dockwidget import VeriVDDockWidget
TEST_FILE = "/Users/denis/Documents/temp/verivd/221116_justif.gpkg"
class VeriVD:
    """QGIS Plugin Implementation: menu/toolbar entry points and GPKG loading."""

    def __init__(self, iface: QgisInterface):
        """Store the QGIS interface, install translations and create the toolbar.

        :param iface: the QGIS interface passed in by the plugin loader
        """
        self.iface = iface
        # initialize plugin directory
        self.plugin_dir = os.path.dirname(__file__)
        self.gpkg_data = None
        self.layer_models = LayerModels(self.iface)
        # initialize translation
        qgis_locale = QLocale(QSettings().value("locale/userLocale"))
        locale_path = os.path.join(os.path.dirname(__file__), "i18n")
        self.translator = QTranslator()
        self.translator.load(qgis_locale, "veri_vd", "_", locale_path)
        QCoreApplication.installTranslator(self.translator)
        # Declare instance attributes
        self.actions = {}
        self.menu_entry = self.tr("&Véri-Vaud")
        # TODO: We are going to let the user set this up in a future iteration
        self.toolbar = self.iface.addToolBar("VeriVD")
        self.toolbar.setObjectName("VeriVD")
        self.dock_widget = None

    def tr(self, source_text):
        """Translate ``source_text`` within the plugin's translation context."""
        return QCoreApplication.translate("veri_vd", source_text)

    def initGui(self):
        """Create the menu entries and toolbar icons inside the QGIS GUI."""
        self.actions["main"] = QAction(
            QIcon(os.path.join(os.path.dirname(__file__), "..", "icons", "icon.png")),
            self.tr("Vérification des mensurations vaudoises"),
            self.iface.mainWindow(),
        )
        self.actions["main"].triggered.connect(self.run)
        self.iface.addPluginToMenu(self.menu_entry, self.actions["main"])
        self.iface.addToolBarIcon(self.actions["main"])
        self.dock_widget = VeriVDDockWidget(self.layer_models)
        self.iface.addDockWidget(Qt.TopDockWidgetArea, self.dock_widget)
        self.dock_widget.file_changed.connect(self.open_gpkg_file)
        for model in self.layer_models.models():
            # model=model binds the current model in the lambda (late-binding pitfall).
            model.layers_loaded.connect(lambda layer_name, layers_loaded, model=model: self.__on_layers_loaded(model, layer_name, layers_loaded))
        if DEBUG and os.path.exists(TEST_FILE):
            self.open_gpkg_file(TEST_FILE)

    def unload(self):
        """Removes the plugin menu item and icon from QGIS GUI."""
        if self.dock_widget:
            self.dock_widget.close()
            self.dock_widget.deleteLater()
        for action in self.actions.values():
            self.iface.removePluginMenu(self.menu_entry, action)
            self.iface.removeToolBarIcon(action)
        del self.toolbar

    def open_gpkg_file(self, file):
        """React to a new GeoPackage path: optionally unload the previous file's
        layers, then point the models at the new data source.

        :param file: path of the selected GeoPackage, or an empty value to clear
        """
        if self.gpkg_data is not None and self.layer_models.has_loaded_layer():
            if (
                QMessageBox.question(
                    self.dock_widget,
                    "Veri-VD",
                    f"Voulez-vous conserver les couches chargées par {self.gpkg_data.path}?",
                )
                == QMessageBox.No
            ):
                self.layer_models.unload_all_layers()
        self.layer_models.reset_models()
        if file:
            # The previous encode('utf-8').decode('utf-8') round-trip was a no-op.
            self.gpkg_data = GpkgData(self.iface, file)
            self.layer_models.set_gpkg_data(self.gpkg_data)
            self.dock_widget.tabWidget.setEnabled(True)
            self.dock_widget.process_justificatif_button.setEnabled(HAS_JUSTIFICATIF)
        else:
            self.gpkg_data = None
            self.dock_widget.tabWidget.setEnabled(False)
            self.dock_widget.process_justificatif_button.setEnabled(False)
            self.layer_models.set_gpkg_data(None)

    def run(self):
        """Run method that loads and starts the plugin"""
        self.dock_widget.show()
        # Set the first tab to open
        self.dock_widget.tabWidget.setCurrentIndex(0)
        self.dock_widget.tabWidget.setEnabled(False)

    def __on_layers_loaded(self, model: LayerListModel, layer_name: str, layers_loaded: [LayerInfo]):
        """Report the outcome of a layer-group load for models with control layers."""
        if model.has_control_layers:
            control_layers_loaded = 0
            layer_names = []
            for layer_info in layers_loaded:
                if layer_info.control_layer:
                    control_layers_loaded += 1
                    layer_names.append(layer_info.display_name)
            if control_layers_loaded:
                # self.iface.messageBar().pushWarning("VeriVD", 'Les scripts de vérification ont détecté des éléments problématiques pour le thème "{}:\n {}".'.format(layer_name, '\n'.join(layer_names)))
                pass
            else:
                # Fixed message: the implicit literal concatenation previously
                # dropped the apostrophes ("nont pas détecté délément").
                self.iface.messageBar().pushMessage(
                    "VeriVD",
                    "Les scripts de vérification n'ont pas détecté d'élément particulier pour le thème \"{}\".".format(layer_name),
                )
<file_sep>/README.md
# QGIS VeriVD
Pour installer le plugin, ajouter l'adresse du répertoire suivante dans les paramètres du gestionnaire de plugins
https://github.com/VeriVD/qgis_VeriVD/releases/latest/download/plugins.xml
<file_sep>/utils/create-comment-table.sql
CREATE TABLE 'justificatif_nogeometry' (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session TEXT NOT NULL,
layer TEXT NOT NULL,
topic TEXT NOT NULL,
statut TEXT NOT NULL,
texte TEXT NOT NULL
)
CREATE TABLE 'justificatif_point' (
id INTEGER PRIMARY KEY AUTOINCREMENT,
geometry POINT,
session TEXT NOT NULL,
layer TEXT NOT NULL,
topic TEXT NOT NULL,
statut TEXT NOT NULL,
texte TEXT NOT NULL
)
CREATE TABLE 'justificatif_line' (
id INTEGER PRIMARY KEY AUTOINCREMENT,
geometry COMPOUNDCURVE,
session TEXT NOT NULL,
layer TEXT NOT NULL,
topic TEXT NOT NULL,
statut TEXT NOT NULL,
texte TEXT NOT NULL
)
CREATE TABLE 'justificatif_polygon' (
id INTEGER PRIMARY KEY AUTOINCREMENT,
geometry CURVEPOLYGON,
session TEXT NOT NULL,
layer TEXT NOT NULL,
topic TEXT NOT NULL,
statut TEXT NOT NULL,
texte TEXT NOT NULL
)
INSERT INTO gpkg_contents (table_name, data_type) values ('justificatif_nogeometry','attributes');
INSERT INTO gpkg_contents (table_name, data_type) values ('justificatif_point','features');
INSERT INTO gpkg_geometry_columns (table_name, column_name, geometry_type_name, srs_id, z, m) VALUES ('justificatif_point', 'geometry', 'POINT', 2056, 0, 0);
INSERT INTO gpkg_contents (table_name, data_type) values ('justificatif_line','features');
INSERT INTO gpkg_geometry_columns (table_name, column_name, geometry_type_name, srs_id, z, m) VALUES ('justificatif_line', 'geometry', 'COMPOUNDCURVE', 2056, 0, 0);
INSERT INTO gpkg_contents (table_name, data_type) values ('justificatif_polygon','features');
INSERT INTO gpkg_geometry_columns (table_name, column_name, geometry_type_name, srs_id, z, m) VALUES ('justificatif_polygon', 'geometry', 'CURVEPOLYGON', 2056, 0, 0);
-- insert example
insert into commentaires (
geometry,
geometry_type,
session,
topic,
statut,
texte
) values (
st_geomfromtext('Point(2541375 1151076)'),
'POINT',
'session',
'topic',
'valide',
'mon commentaire');
insert into commentaires (
geometry,
geometry_type,
session,
topic,
statut,
texte
) values (
st_geomfromtext('LineString(2541370 1151070, 2541379 1151071)'),
'LINESTRING',
'session 2',
'topic 2',
'valide 2',
'mon commentaire 2');
<file_sep>/verivd/core/justificatif.py
"""
/***************************************************************************
VeriVDDockWidget
A QGIS plugin
Vérification des mensurations vaudoises
-------------------
begin : 2022
git sha : $Format:%H$
copyright : (C) 2018 by FSN/OIT
email : <EMAIL>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from datetime import date
from enum import Enum
from qgis.core import (
Qgis,
QgsCompoundCurve,
QgsExpressionContextUtils,
QgsFeature,
QgsFeatureRequest,
QgsProject,
QgsProviderRegistry,
QgsWkbTypes,
edit,
)
from qgis.PyQt.QtCore import QObject, pyqtSignal
from verivd.core.gpkg_data import GpkgData
from verivd.core.plugin_info import dbg_info
# QgsProviderSublayerDetails is only available in recent QGIS releases
# (the UI mentions QGIS 3.22); the justificatif feature is disabled when
# the import fails on older versions.
HAS_JUSTIFICATIF = True
try:
    from qgis.core import QgsProviderSublayerDetails
except ImportError:
    HAS_JUSTIFICATIF = False
class GeometryType(Enum):
    """Geometry families used to route a justificatif to the matching
    ``justificatif_*`` layer of the GeoPackage."""

    NO_GEOMETRY = "NO_GEOMETRY"
    POLYGON = "POLYGON"
    LINE = "LINE"
    POINT = "POINT"
class Justificatif(QObject):
    """Collects the 'justificatif' annotations found in the layers of a
    GeoPackage and copies them as features of the dedicated
    ``justificatif_*`` layers (one per geometry family).

    Progress reported through ``progress_changed``:
      0-20: deleting former features (statut = nouveau)
      20-50: creating qgis layers
      50-80: finding justificatif in gpkg
      80-100: writing to layers
    """

    # (progress 0-100, message to display — None once finished)
    progress_changed = pyqtSignal(int, str)

    def __init__(self, parent=None):
        self.canceled = False
        self.layer_tree_group = None
        super().__init__(parent)

    def cancel(self):
        """Request cooperative cancellation; checked between processing steps."""
        self.canceled = True

    def process_justificatif(self, gpkg_data: GpkgData):
        """Scan every layer of ``gpkg_data`` for features whose 'justificatif'
        field is non-empty and copy them into the justificatif_* layers,
        replacing the features of the current session (statut = 'nouveau').

        :param gpkg_data: the GeoPackage data source to process
        """
        self.canceled = False
        progress = 0
        # One target definition per geometry family; 'features' accumulates
        # the justificatif features written at the end.
        justificatif_layer_no_geometry = {
            "qgis_layer": None,
            "layer_name": "justificatif_nogeometry",
            "title": "justificatif - sans géométrie",
            "features": [],
            "geometry_type": GeometryType.NO_GEOMETRY,
        }
        justificatif_layer_point = {
            "qgis_layer": None,
            "layer_name": "justificatif_point",
            "title": "justificatif - point",
            "features": [],
            "geometry_type": GeometryType.POINT,
        }
        justificatif_layer_line = {
            "qgis_layer": None,
            "layer_name": "justificatif_line",
            "title": "justificatif - lignes",
            "features": [],
            "geometry_type": GeometryType.LINE,
        }
        justificatif_layer_polygon = {
            "qgis_layer": None,
            "layer_name": "justificatif_polygon",
            "title": "justificatif - polygones",
            "features": [],
            "geometry_type": GeometryType.POLYGON,
        }
        justificatif_layers = {
            QgsWkbTypes.NoGeometry: justificatif_layer_no_geometry,
            QgsWkbTypes.PointGeometry: justificatif_layer_point,
            QgsWkbTypes.LineGeometry: justificatif_layer_line,
            QgsWkbTypes.PolygonGeometry: justificatif_layer_polygon,
        }
        # if self.layer_tree_group is None:
        #     self.layer_tree_group = QgsProject.instance().layerTreeRoot().insertGroup(0, "Justificatifs")
        lpi = 0
        for jf in justificatif_layers.values():
            # Reuse a project layer already tagged with our id, otherwise create it.
            veri_vd_id = f'VERID_VD_{jf["geometry_type"].value}'
            for ql in QgsProject.instance().mapLayers().values():
                if ql.customProperty("verid_vd_id") == veri_vd_id:
                    jf["qgis_layer"] = ql
                    break
            if jf["qgis_layer"] is None:
                jf["qgis_layer"] = gpkg_data.create_qgis_layer(jf["title"], jf["layer_name"], custom_properties={"verid_vd_id": veri_vd_id})
                # QgsProject.instance().addMapLayer(jf["qgis_layer"], False)
                # self.layer_tree_group.insertLayer(lpi, jf["qgis_layer"])
            lpi += 1
        justificatif_qgis_layers = [jf["qgis_layer"] for jf in justificatif_layers.values()]
        # delete current justificatifs (statut = 'nouveau')
        for layer in justificatif_qgis_layers:
            if self.canceled:
                return
            self.progress_changed.emit(int(progress), f"Suppression des justificatifs de la session dans la couche {layer.name()}")
            progress += 5
            req = QgsFeatureRequest()
            req.setFilterExpression("\"statut\" = 'nouveau'")
            features_to_delete = []
            for justif_feature in layer.getFeatures(req):
                features_to_delete.append(justif_feature.id())
            with edit(layer):
                layer.deleteFeatures(features_to_delete)
        layer_details = QgsProviderRegistry.instance().querySublayers(gpkg_data.path, Qgis.SublayerQueryFlag.ResolveGeometryType)
        layers = []
        for layer_detail in layer_details:
            if self.canceled:
                return
            progress += 1 / len(layer_details) * 30
            # The justificatif_* layers themselves are not scanned.
            if layer_detail.name().startswith("justificatif"):
                continue
            self.progress_changed.emit(int(progress), f"Creation des couches: {layer_detail.name()}")
            dbg_info(f"getting layer {layer_detail.name()}")
            options = QgsProviderSublayerDetails.LayerOptions(QgsProject.instance().transformContext())
            options.loadDefaultStyle = False
            layer = layer_detail.toLayer(options)
            layers.append(layer)
        for layer in layers:
            if self.canceled:
                return
            progress += 1 / len(layers) * 30
            # Fix: the progress message previously used layer_detail.name(),
            # the stale loop variable from the loop above, so it always showed
            # the last sublayer's name instead of the layer being scanned.
            self.progress_changed.emit(int(progress), f"Recherche de justificatif dans {layer.name()}")
            req = QgsFeatureRequest()
            req.setFilterExpression("\"justificatif\" != ''")
            for topic_feature in layer.getFeatures(req):
                dbg_info(f"layer {layer.name()}: found feature {topic_feature.id()}")
                geometry_type = topic_feature.geometry().type()
                justif_layer = justificatif_layers.get(geometry_type, justificatif_layer_no_geometry)
                justif_feature = QgsFeature(justif_layer["qgis_layer"].fields())
                # A bare CircularString must be wrapped in a CompoundCurve to
                # fit the target layer's geometry type.
                if (
                    justif_layer["qgis_layer"].wkbType() == QgsWkbTypes.CompoundCurve
                    and topic_feature.geometry().wkbType() == QgsWkbTypes.CircularString
                ):
                    g = QgsCompoundCurve()
                    g.addCurve(topic_feature.geometry().constGet().clone())
                    justif_feature.setGeometry(g)
                else:
                    justif_feature.setGeometry(topic_feature.geometry())
                justif_feature["layer"] = layer.name().lower()
                if topic_feature.fields().indexFromName("topic") >= 0:
                    justif_feature["topic"] = topic_feature["topic"]
                else:
                    justif_feature["topic"] = None
                justif_feature["session"] = date.today().isoformat()
                justif_feature["statut"] = "nouveau"
                justif_feature["texte"] = topic_feature["justificatif"]
                justif_feature["operateur"] = QgsExpressionContextUtils.globalScope().variable("user_full_name")
                justif_layer["features"].append(justif_feature)
        # Write the collected features into their target layers.
        for justif_def in justificatif_layers.values():
            dbg_info(f"couche justificatif {justif_def['geometry_type'].value}: enregistrement de {len(justif_def['features'])} objets")
            if len(justif_def["features"]):
                self.progress_changed.emit(
                    int(progress), f"Ecriture des justificatifs ({len(justif_def['features'])}) dans la couche {justif_def['layer_name']}"
                )
                qgs_layer = justif_def["qgis_layer"]
                dbg_info(f"qgis layer valid: {qgs_layer.isValid()}")
                with edit(qgs_layer):
                    ok = qgs_layer.addFeatures(justif_def["features"])
                    dbg_info(f"features added: {ok}")
                progress += 5
        self.progress_changed.emit(100, None)
<file_sep>/verivd/gui/veri_vd_dockwidget.py
"""
/***************************************************************************
VeriVDDockWidget
A QGIS plugin
Vérification des mensurations vaudoises
-------------------
begin : 2018-11-15
git sha : $Format:%H$
copyright : (C) 2018 by FSN/OIT
email : <EMAIL>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from qgis.gui import QgsFileWidget
from qgis.PyQt import uic
from qgis.PyQt.QtCore import QModelIndex, Qt, pyqtSignal
from qgis.PyQt.QtWidgets import QApplication, QDockWidget, QProgressDialog
from verivd.core.justificatif import HAS_JUSTIFICATIF, Justificatif
from verivd.gui.help import (
MESSAGE_BASE,
MESSAGE_CHECKER,
MESSAGE_ILIVALIDATOR,
MESSAGE_VERIF,
)
FORM_CLASS, _ = uic.loadUiType(os.path.join(os.path.dirname(__file__), "../ui/veri_vd_dockwidget_base.ui"))
class VeriVDDockWidget(QDockWidget, FORM_CLASS):
    """Main dock widget of the plugin: GeoPackage file picker, the four
    model list views, help texts and the justificatif button."""

    # emitted with the path of the newly selected GeoPackage file
    file_changed = pyqtSignal(str)
    # emitted when the widget is closed
    closingPlugin = pyqtSignal()

    def __init__(self, layer_models, parent=None):
        """Wire the UI widgets to the given LayerModels.

        :param layer_models: the LayerModels aggregate providing the four models
        :param parent: optional Qt parent widget
        """
        super().__init__(parent)
        self.setupUi(self)
        # Disabled until a GeoPackage has been opened.
        self.tabWidget.setEnabled(False)
        self.layer_models = layer_models
        self.file_widget.setDialogTitle("Ouvrir un fichier Geopackage")
        self.file_widget.setRelativeStorage(QgsFileWidget.Absolute)
        self.file_widget.setFilter("fichiers Geopackage (*.gpkg *.GPKG)")
        self.base_help_label.setText(MESSAGE_BASE)
        self.checker_help_label.setText(MESSAGE_CHECKER)
        self.ili_help_label.setText(MESSAGE_ILIVALIDATOR)
        self.verif_help_label.setText(MESSAGE_VERIF)
        # One list view per model.
        self.base_list_view.setModel(layer_models.base_layer_model)
        self.verif_list_view.setModel(layer_models.verif_layer_model)
        self.ili_validator_list_view.setModel(layer_models.ili_validator_layer_model)
        self.checker_list_view.setModel(layer_models.checker_layer_model)
        # Keep the ili/checker tabs enabled only when their model has rows.
        layer_models.ili_validator_layer_model.modelReset.connect(self.update_ili_tab)
        layer_models.ili_validator_layer_model.dataChanged.connect(self.update_ili_tab)
        layer_models.checker_layer_model.modelReset.connect(self.update_checker_tab)
        layer_models.checker_layer_model.dataChanged.connect(self.update_checker_tab)
        self.file_widget.fileChanged.connect(self.file_changed)
        self.show_help_button.clicked.connect(self.show_help)
        # Start with the help frames visible.
        self.show_help_button.click()
        self.process_justificatif_button.setEnabled(False)
        if HAS_JUSTIFICATIF:
            self.process_justificatif_button.clicked.connect(self.process_justificatif_clicked)
        else:
            self.process_justificatif_button.setToolTip("Pour générer les justificatifs, QGIS 3.22 est nécessaire.")

    def update_checker_tab(self):
        # Tab index 2 is the checker tab.
        has_rows = self.layer_models.checker_layer_model.rowCount(QModelIndex()) > 0
        self.tabWidget.setTabEnabled(2, has_rows)

    def update_ili_tab(self):
        # Tab index 1 is the ilivalidator tab.
        has_rows = self.layer_models.ili_validator_layer_model.rowCount(QModelIndex()) > 0
        self.tabWidget.setTabEnabled(1, has_rows)

    def closeEvent(self, event):
        # Notify the plugin, then accept the close.
        self.closingPlugin.emit()
        event.accept()

    def show_help(self, show: bool):
        """Show or hide the four help frames."""
        self.base_help_frame.setVisible(show)
        self.checker_help_frame.setVisible(show)
        self.ili_help_frame.setVisible(show)
        self.verif_help_frame.setVisible(show)

    def process_justificatif_clicked(self):
        """Run the justificatif processing with a modal progress dialog."""
        j = Justificatif(self)
        p = QProgressDialog("Traitement des justificatifs …", "Annuler", 0, 100, self)
        p.setWindowModality(Qt.WindowModal)
        p.show()

        def update_progress(progress, text):
            p.setValue(progress)
            p.setLabelText(text)
            # Keep the dialog responsive during the synchronous processing.
            QApplication.processEvents()

        j.progress_changed.connect(update_progress)
        j.process_justificatif(self.layer_models.gpkg_data)
<file_sep>/verivd/core/layer_models.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.gui import QgisInterface
from verivd.core.layer_list_model import LayerListModel
from verivd.core.models.base_model import BaseLayerModel
from verivd.core.models.checker_model import CheckerLayerModel
from verivd.core.models.ili_validator_model import IliValidatorLayerModel
from verivd.core.models.verif_model import VerifLayerModel
class LayerModels:
    """Aggregate of the four layer list models used by the plugin, with
    helpers that apply an operation to all of them at once."""

    def __init__(self, iface: QgisInterface):
        self.gpkg_data = None
        self.verif_layer_model = VerifLayerModel(iface)
        self.ili_validator_layer_model = IliValidatorLayerModel(iface)
        self.checker_layer_model = CheckerLayerModel(iface)
        self.base_layer_model = BaseLayerModel(iface)

    def set_gpkg_data(self, gpkg_data):
        """Propagate the GeoPackage data source to every model."""
        self.gpkg_data = gpkg_data
        for model in self.models():
            model.gpkg_data = gpkg_data

    def unload_all_layers(self):
        """Unload every layer loaded by any of the models."""
        for model in self.models():
            model.unload_all()

    def models(self) -> [LayerListModel]:
        """All models, in a fixed order."""
        return (
            self.verif_layer_model,
            self.ili_validator_layer_model,
            self.checker_layer_model,
            self.base_layer_model,
        )

    def has_loaded_layer(self) -> bool:
        """
        Returns if any of the models has a loaded layer
        """
        return any(meta_layer.loaded for model in self.models() for meta_layer in model.veri_meta_layers)

    def reset_models(self):
        """Ask every model to reload its content."""
        for model in self.models():
            model.reload()
<file_sep>/verivd/core/symbology_type.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from enum import Enum
class SymbologyType(Enum):
    """How a layer's symbology is defined when it is loaded."""

    NO_SYMBOL = 1  # no dedicated handling: the layer keeps its default symbology
    QML = 2  # load a .qml style file resolved from the plugin's qml directory
    SIMPLE = 3  # simple symbol built from LayerInfo.symbology_properties
    RANDOM_CATEGORIZED = 4  # categorized renderer, one random color per value
<file_sep>/verivd/tests/test_models.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.testing import start_app, unittest
from qgis.testing.mocked import get_iface
from verivd.core.gpkg_data import GpkgData
from verivd.core.layer_models import LayerModels
from verivd.core.models.base_model import BASE_LAYER_INFOS, BaseLayerModel
from verivd.core.models.verif_model import VERIF_LAYER_INFOS, VerifLayerModel
from verivd.core.symbology_type import SymbologyType
start_app()
# GeoPackage fixture read by the tests (relative to the working directory).
DATA_PATH = "133_AF2_2440_NUM.gpkg"
# Layers known to have no QML style file; excluded from test_qml.
MISSING_QML = ("101_verif_ddp_segment_bf_modifie",)
class OfflineConverterTest(unittest.TestCase):
    """Checks that the static layer-info tables cover every meta layer and
    that each QML-symbolized layer resolves to an existing style file."""

    @classmethod
    def setUpClass(cls):
        cls.iface = get_iface()

    def setUp(self):
        pass

    def test_base_model_completeness(self):
        """Every meta layer of the base model has an entry in BASE_LAYER_INFOS."""
        base_model = BaseLayerModel(self.iface)
        for meta_layer in base_model._veri_meta_layers:
            self.assertTrue(meta_layer.name in BASE_LAYER_INFOS, meta_layer.name)

    def test_verif_model_completeness(self):
        """Every meta layer of the verif model has an entry in VERIF_LAYER_INFOS."""
        verif_model = VerifLayerModel(self.iface)
        for meta_layer in verif_model._veri_meta_layers:
            self.assertTrue(meta_layer.name in VERIF_LAYER_INFOS, meta_layer.name)

    def test_qml(self):
        """Each QML-styled layer (except the known misses) resolves to a file."""
        layer_models = LayerModels(self.iface)
        gpkg_data = GpkgData(self.iface, DATA_PATH)
        layer_models.set_gpkg_data(gpkg_data)
        for model in layer_models.models():
            for meta_layer in model._veri_meta_layers:
                for info in model.layer_infos(meta_layer.name):
                    if info.symbology_type != SymbologyType.QML:
                        continue
                    if info.layer_name in MISSING_QML:
                        continue
                    self.assertIsNotNone(
                        gpkg_data.qml_definition(meta_layer.name, info),
                        f"Layer {info.layer_name} has no QML file",
                    )
<file_sep>/verivd/core/layer_info.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from verivd.core.symbology_type import SymbologyType
class LayerInfo:
    """Describes one layer of the GeoPackage: how to load it, filter it and
    symbolize it."""

    def __init__(
        self,
        display_name: str,
        layer_name: str,
        symbology_type: SymbologyType = SymbologyType.QML,
        symbology_properties: dict = None,
        symbology_data_defined_properties: dict = None,
        category_field=None,
        sql_request: str = "",
        visibility: bool = True,
        opacity: float = 1,
        control_layer: bool = False,
    ):
        """
        :param display_name: name shown in the QGIS layer tree
        :param layer_name: table name inside the GeoPackage
        :param symbology_type: how the symbology is defined (see SymbologyType)
        :param symbology_properties: symbol properties (used for SymbologyType.SIMPLE);
            None means empty — the previous literal ``{}`` default was a shared
            mutable default argument
        :param symbology_data_defined_properties: data-defined symbol properties
            (used for SymbologyType.RANDOM_CATEGORIZED); None means empty
        :param category_field: field used to build categories (RANDOM_CATEGORIZED)
        :param sql_request: optional OGR subset filter
        :param visibility: initial checked state in the layer tree
        :param opacity: layer opacity, 1 = fully opaque
        :param control_layer: if True, the plugin will report if no control layer has been loaded (saying no errors have been encountered)
        """
        self.display_name = display_name
        self.layer_name = layer_name
        self.sql_request = sql_request
        self.symbology_type = symbology_type
        # Fresh dicts per instance: a {} default would be shared by all callers.
        self.symbology_properties = symbology_properties if symbology_properties is not None else {}
        self.symbology_data_defined_properties = (
            symbology_data_defined_properties if symbology_data_defined_properties is not None else {}
        )
        self.category_field = category_field
        self.opacity = opacity
        self.visibility = visibility
        self.control_layer = control_layer
<file_sep>/verivd/core/layer_list_model.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.core import (
QgsExpressionContextUtils,
QgsLayerTree,
QgsLayerTreeGroup,
QgsProject,
QgsRenderContext,
QgsVectorLayer,
)
from qgis.gui import QgisInterface
from qgis.PyQt.QtCore import QAbstractListModel, QModelIndex, Qt, pyqtSignal
from verivd.core.gpkg_data import GpkgData
from verivd.core.layer_info import LayerInfo
from verivd.core.veri_meta_layer import VeriMetaLayer
Debug = True
VERIVD_GROUP_LAYER_ID = "verivd-layergropup-id"
class LayerListModel(QAbstractListModel):
# signal emitted when layers are loaded (group_name, loaded_layers)
layers_loaded = pyqtSignal(str, list)
def __init__(
self,
iface: QgisInterface,
layers: [VeriMetaLayer] = [],
has_control_layers: bool = False,
):
"""
:param iface: the QgisInterface
:param layers: pre-load the model with data
:param has_control_layers: if True, the plugin will report if no control_layers have been loaded
"""
self.iface = iface
self._veri_meta_layers: [VeriMetaLayer] = layers
self._gpkg_data: GpkgData = None
self._has_control_layers = has_control_layers
super().__init__()
self.__is_removing_layer = False
QgsProject.instance().layersWillBeRemoved.connect(self.__layers_will_be_removed)
@property
def gpkg_data(self):
return self._gpkg_data
@gpkg_data.setter
def gpkg_data(self, data):
self._gpkg_data = data
self.reload()
@property
def veri_meta_layers(self) -> [VeriMetaLayer]:
return self._veri_meta_layers
@veri_meta_layers.setter
def veri_meta_layers(self, value: [VeriMetaLayer]):
self.beginResetModel()
self._veri_meta_layers = value
self.endResetModel()
@property
def has_control_layers(self) -> bool:
return self._has_control_layers
def reload(self):
"""
Reloads the data when the data has changed
might be re-implemented
"""
pass
def layer_infos(self, layer: str) -> [LayerInfo]:
"""
Returns the list of LayerInfo for the current category and given layer name
Must be re-implemented
:param layer: the layer name
"""
return None
def group_name(self, layer: str):
"""
Returns the name of the group to be created in the layer tree
might be re-implemented
:param layer: the layer name
"""
return layer
def post_process_layer(self, layer: QgsVectorLayer, position: int):
"""
Post-process the QGIS layer before adding it to the map and layer tree
Might be reimplemented
:param layer: the layer
:param position: the position (index) in the group
"""
pass
def rowCount(self, parent: QModelIndex = ...) -> int:
return len(self._veri_meta_layers)
def flags(self, index: QModelIndex) -> Qt.ItemFlags:
# Qt QAbstractListModel virtual method
return Qt.ItemIsEnabled | Qt.ItemIsUserCheckable
def data(self, index: QModelIndex, role: int):
# Qt QAbstractListModel virtual method
if index.row() < 0 or index.row() >= len(self._veri_meta_layers):
return None
if role == Qt.DisplayRole:
return self._veri_meta_layers[index.row()].display_name
if role == Qt.CheckStateRole:
return self._veri_meta_layers[index.row()].loaded
return None
def setData(self, index: QModelIndex, value, role: int) -> bool:
# Qt QAbstractListModel virtual method
if role == Qt.CheckStateRole:
if value == Qt.Checked and self.data(index, role) != Qt.PartiallyChecked:
self.__load_verivd_layer(index)
else:
self.__unload_layer(index)
self.iface.mapCanvas().refresh()
self.dataChanged.emit(index, index, [Qt.CheckStateRole])
return True
return False
def unload_all(self):
self.beginResetModel()
for row in range(0, self.rowCount(QModelIndex())):
self.__unload_layer(self.index(row, 0))
self.endResetModel()
self.iface.mapCanvas().refresh()
def layer_context(self, layer: QgsVectorLayer) -> QgsRenderContext:
context = QgsRenderContext.fromMapSettings(self.iface.mapCanvas().mapSettings())
context.expressionContext().appendScope(QgsExpressionContextUtils.layerScope(layer))
return context
def __load_verivd_layer(self, index: QModelIndex):
if Debug:
print("Load layer")
if not self.gpkg_data:
return
veri_meta_layer = self._veri_meta_layers[index.row()]
group_name = self.group_name(veri_meta_layer.display_name)
layer_tree_group = QgsProject.instance().layerTreeRoot().insertGroup(0, group_name)
veri_meta_layer.layer_group_id = veri_meta_layer.name
layer_tree_group.setCustomProperty(VERIVD_GROUP_LAYER_ID, veri_meta_layer.layer_group_id)
layer_tree_group.setExpanded(False)
layer_infos = self.layer_infos(veri_meta_layer.name)
layers = self.gpkg_data.create_qgis_layers(veri_meta_layer.name, layer_infos)
veri_meta_layer.qgis_layers = []
i = 0
loaded_layers = []
for layer_info, qgis_layer in layers.items():
if qgis_layer is None:
print(f"no layer loaded for {layer_info.display_name}")
continue
self.post_process_layer(qgis_layer, i)
added_qgis_layer = QgsProject.instance().addMapLayer(qgis_layer, False)
layer_tree_group.insertLayer(i, added_qgis_layer)
if not layer_info.visibility:
node = QgsProject.instance().layerTreeRoot().findLayer(added_qgis_layer.id())
if node:
node.setItemVisibilityChecked(False)
else:
raise Exception('La couche "{}" n' "a pas été chargée.".format(layer_info.display_name))
loaded_layers.append(layer_info)
veri_meta_layer.qgis_layers.append(added_qgis_layer)
i += 1
veri_meta_layer.loaded = Qt.Checked
if i > 0:
self.layers_loaded.emit(group_name, loaded_layers)
def __unload_layer(self, index: QModelIndex):
    """Remove the QGIS layers and the layer tree group of the meta layer at ``index``."""
    if Debug:
        print("Unload")
    veri_meta_layer = self._veri_meta_layers[index.row()]
    # Guard so __layers_will_be_removed ignores removals we trigger ourselves.
    self.__is_removing_layer = True
    for layer in veri_meta_layer.qgis_layers:
        QgsProject.instance().removeMapLayer(layer)
    self.__is_removing_layer = False
    group = self.find_layer_group(
        QgsProject.instance().layerTreeRoot(),
        veri_meta_layer.layer_group_id,
    )
    # Robustness fix: the group may already be gone (e.g. deleted by the user
    # in the legend); removeChildNode(None) would raise.
    if group is not None:
        QgsProject.instance().layerTreeRoot().removeChildNode(group)
    veri_meta_layer.layer_group_id = None
    veri_meta_layer.qgis_layers = []
    veri_meta_layer.loaded = Qt.Unchecked
@staticmethod
def find_layer_group(node: QgsLayerTreeGroup, group_id: str) -> QgsLayerTreeGroup:
    """Depth-first search for the layer tree group whose VERIVD_GROUP_LAYER_ID
    custom property equals ``group_id``.

    Returns the matching QgsLayerTreeGroup, or None if no group carries the id.
    """
    for child in node.children():
        if QgsLayerTree.isGroup(child):
            child_gid = child.customProperty(VERIVD_GROUP_LAYER_ID)
            if child_gid == group_id:
                return child
            else:
                # NOTE(review): indentation was ambiguous in the reviewed dump;
                # this reading recurses into non-matching child groups so nested
                # groups are searched too — confirm against the repository.
                group = LayerListModel.find_layer_group(child, group_id)
                if group:
                    return group
    return None
def __layers_will_be_removed(self, removed_layer_ids):
    """Keep the model in sync when layers are removed outside of this model
    (e.g. deleted by the user in the legend).

    ``removed_layer_ids``: iterable of QGIS layer ids about to be removed.
    Updates each meta layer's ``qgis_layers`` list and its tri-state
    ``loaded`` status.
    """
    if self.__is_removing_layer:
        # Removal was triggered by __unload_layer, which maintains state itself.
        return
    self.beginResetModel()
    # Set lookup: O(1) membership, and immune to duplicated ids — the previous
    # nested loops could collect the same layer twice and crash on the second
    # list.remove() call.
    removed_ids = set(removed_layer_ids)
    for veri_layer in self._veri_meta_layers:
        layers_to_remove = [qgis_layer for qgis_layer in veri_layer.qgis_layers if qgis_layer.id() in removed_ids]
        if layers_to_remove:
            for layer in layers_to_remove:
                veri_layer.qgis_layers.remove(layer)
            # Partially checked while some layers survive, unchecked when none do.
            veri_layer.loaded = Qt.PartiallyChecked if veri_layer.qgis_layers else Qt.Unchecked
    self.endResetModel()
<file_sep>/verivd/core/models/verif_model.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.core import QgsProperty, QgsSymbolLayer
from qgis.gui import QgisInterface
from qgis.PyQt.QtCore import Qt
from verivd.core.layer_info import LayerInfo
from verivd.core.layer_list_model import LayerListModel
from verivd.core.symbology_type import SymbologyType
from verivd.core.veri_meta_layer import VeriMetaLayer
VERIF_LAYER_INFOS = {
"VerifCalage": [
LayerInfo(
display_name="Ecart très significatif (ITF)",
layer_name="132_callagebf_ddp_inter",
control_layer=True,
visibility=True,
),
LayerInfo(
display_name="Limites des immeubles (BDMO)",
layer_name="132_callagebf_ddp_ligneae",
visibility=True,
),
LayerInfo(
display_name="Marge 1mm sur ancien plan (BDMO)",
layer_name="132_callagebf_ddp_buff_ae",
visibility=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifBiens_fonds": [
LayerInfo(
display_name='Point limite non-matérialisé, vérification de l\'attribut "defini_exactement"',
layer_name="122_verif_BF_PL_non_mat_def_exa",
control_layer=True,
),
LayerInfo(
display_name="Sommet manquant sous un point limite",
layer_name="100_verif_bf_sommet_manquant_sous_pl",
control_layer=True,
),
LayerInfo(
display_name="Sommet proche d'une limite",
layer_name="100_verif_bf_sommet_proche_pl",
control_layer=True,
),
LayerInfo(
display_name="Point limite manquant sur un sommet",
layer_name="100_verif_bf_pl_manquant_sur_sommet",
control_layer=True,
),
LayerInfo(
display_name="Point limite isolé",
layer_name="100_verif_bf_pl_isole",
control_layer=True,
),
LayerInfo(
display_name="Point limite avant point",
layer_name="100_verif_bf_pl_avant_point",
control_layer=True,
),
LayerInfo(
display_name="Point limite hors périmètre",
layer_name="100_verif_bf_pl_hors_perimetre",
),
LayerInfo(
display_name="Point limite en bordure du périmètre",
layer_name="100_verif_bf_pl_sur_bord_perimetre",
control_layer=True,
),
LayerInfo(
display_name="Point limite décrivant une limite quasi-alignée",
layer_name="101_verif_bf_pl_aligne",
control_layer=True,
),
LayerInfo(
display_name="Ecart d'alignement",
layer_name="101_verif_bf_distance_alignement",
),
LayerInfo(
display_name="Segment de biens fonds simplifié",
layer_name="101_verif_bf_segment_bf_modifie",
),
LayerInfo(
display_name="Segment de DDP simplifié",
layer_name="101_verif_ddp_segment_bf_modifie",
),
LayerInfo(
display_name="Précision planimétrique des points limites",
layer_name="100_verif_bf_pl_precplannum",
),
LayerInfo(
display_name="Surfaces des biens fonds incohérentes",
layer_name="131_no_bf_rp_geom_verif_bf_surf_diff",
control_layer=True,
),
LayerInfo(
display_name="DP non coupé sur limite de plan",
layer_name="131_no_bf_rp_geom_verif_dp_coupe_rp",
control_layer=True,
),
LayerInfo(
display_name="Biens fonds à proximité du lot",
layer_name="111_bdmo_biens_fonds_alentours",
),
LayerInfo(
display_name="Différence des surfaces des immeubles selon 6422bis",
layer_name="107_verif_6422bis",
visibility=False,
),
LayerInfo(
display_name="Plans en vigueur à proximité du lot",
layer_name="111_bdmo_repartition_plans_alentours",
visibility=False,
),
LayerInfo(
display_name="Plans en vigueur du lot",
layer_name="111_bdmo_repartition_plan_en_vigueur_du_lot",
visibility=False,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifNomenclature": [
LayerInfo(
display_name="Sifflet entre les immeubles et la nomenclature",
layer_name="103_VERIF_BF_NO_Surface",
control_layer=True,
),
LayerInfo(
display_name="Nom local en doublon",
layer_name="131_no_bf_rp_geom_verif_no_doublon_nom",
control_layer=True,
),
LayerInfo(
display_name="Etiquette manquante sur surface de nomenclature",
layer_name="131_no_bf_rp_geom_verif_no_manqueetiquette",
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifAdresses": [
LayerInfo(
display_name="Numéro d'entrée",
layer_name="009_itf_bat_posentree_batiment",
control_layer=True,
),
LayerInfo(
display_name="Point de départ des tronçons",
layer_name="009_itf_bat_point_depart",
control_layer=True,
),
LayerInfo(
display_name="Entrée du RCB",
layer_name="104_dwh_adresse_rcb",
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Différence avec les entrées du RCB",
layer_name="104_verif_entreemo_diff_rcb",
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Lien entre les entrées et les localisations",
layer_name="109_VERIF_Entree_Vers_Localisation",
control_layer=True,
),
LayerInfo(
display_name="Sens du tronçon",
layer_name="009_ITF_BAT_Troncon_rue",
control_layer=True,
),
LayerInfo(
display_name="Genre du lieu denommé incorrect",
layer_name="129_VERIF_loc_LieuDenGenreErr",
control_layer=True,
),
LayerInfo(
display_name="Genre de la rue ou place incorrect",
layer_name="129_VERIF_loc_RueGenreErr",
control_layer=True,
),
LayerInfo(
display_name="Nom des rues",
layer_name="009_ITF_BAT_Troncon_rue",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="texte",
symbology_data_defined_properties={
QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(3),
QgsSymbolLayer.PropertyCapStyle: QgsProperty.fromValue(Qt.RoundCap),
QgsSymbolLayer.PropertyJoinStyle: QgsProperty.fromValue(Qt.RoundJoin),
},
control_layer=True,
),
LayerInfo(
display_name="Habitation sans adresse",
layer_name="108_VERIF_Habitation_sans_adresse",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={
"color": "255, 0, 0, 180",
"border_color": "255, 255, 0",
},
visibility=False,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifCouverture_du_sol": [
LayerInfo(
display_name="Point particulier manquant sous un angle de bâtiment",
layer_name="113_cs_pointbatiment",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 100, 200", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="Sifflet entre les immeubles et la couverture du sol",
layer_name="103_verif_bf_cs_surface",
control_layer=True,
),
LayerInfo(
display_name="Point particulier de bâtiment proche limite BF (vecteur)",
layer_name="120_verif_CS_Point_sit_proche_BF_Vecteur",
control_layer=True,
),
LayerInfo(
display_name="Point particulier de bâtiment proche limite BF",
layer_name="120_verif_CS_Point_sit_proche_BF",
control_layer=True,
),
LayerInfo(
display_name="Point particulier pas sur sommet de CS",
sql_request='"source" = "Couverture_du_sol"',
layer_name="126_verif_point_cs_od",
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifObjets_divers": [
LayerInfo(
display_name="Incohérence entre le genre et la désignation",
layer_name="121_verif_OD_test_designation",
control_layer=True,
),
LayerInfo(
display_name="Aboutissant du linéraire non-superposé à l'objet surfacique",
layer_name="128_verif_od_test_geom_connbordure",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "50, 50, 210"},
control_layer=True,
),
LayerInfo(
display_name="Validation OGC des objets divers",
layer_name="110_od_ogc_geometrie",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="issue_found",
symbology_data_defined_properties={QgsSymbolLayer.PropertySize: QgsProperty.fromValue(2)},
control_layer=True,
),
LayerInfo(
display_name="Point particulier pas sur sommet de OD",
layer_name="126_verif_point_cs_od",
sql_request='"source" = "Objets_divers"',
control_layer=True,
),
LayerInfo(
display_name="Nombre de géométrie par objet divers linéaires",
layer_name="102_verif_od_lineaire_fid",
control_layer=True,
),
LayerInfo(
display_name="Nombre de géométrie par objet divers surfacique",
layer_name="102_verif_od_surfacique_fid",
control_layer=True,
),
LayerInfo(
display_name="Pas utilisé VD",
layer_name="003_ITF_OD_Element_ponctuel",
sql_request='"vd_genre" in ("autre_corps_de_batiment.autre", "cordon_boise", "bac", "grotte_entree_de_caverne", "autre.divers_a_ventiler")',
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Pas utilisé VD",
layer_name="003_ITF_OD_Element_lineaire",
sql_request='"vd_genre" in ("autre_corps_de_batiment.autre", "cordon_boise", "bac", "grotte_entree_de_caverne", "autre.divers_a_ventiler")',
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Pas utilisé VD",
layer_name="003_ITF_OD_Element_surfacique",
sql_request='"vd_genre" in ("autre_corps_de_batiment.autre", "cordon_boise", "bac", "grotte_entree_de_caverne", "autre.divers_a_ventiler")',
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Objet devrait être entouré par un objet surfacique",
layer_name="114_verif_od_surflineaire_sanssurf",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 100, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Elément surfacique qui devrait être linéaire",
layer_name="114_VERIF_OD_surfaciqueErreur",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Elément linéaire qui devrait être surfacique",
layer_name="114_VERIF_OD_lineaireErreur",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "width": "2"},
control_layer=True,
),
LayerInfo(
display_name="Objets divers linéaires (relation vers les géométries)",
layer_name="102_verif_od_lineaire_fid",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="fid_od",
symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(3)},
opacity=0.5,
control_layer=True,
),
LayerInfo(
display_name="Objets divers surfaciques (relation vers les géométries)",
layer_name="102_verif_od_surfacique_fid",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="fid_od",
symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(0.5)},
opacity=0.5,
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifContinuite_des_reseaux": [
LayerInfo(
display_name="CS Nom et numéro",
layer_name="002_ITF_CS_Pos_Surface_CS",
sql_request='"type" = "Nom_objet" AND ("number_name" LIKE "Route%" OR "number_name" LIKE "Ruisseau%" OR "number_name" LIKE "La%" OR "number_name" LIKE "Le%")',
control_layer=True,
),
LayerInfo(
display_name="OD Nom et numéro",
layer_name="003_ITF_OD_Pos_Element_lineaire",
sql_request='"number_name" LIKE "Ligne%"',
control_layer=True,
),
LayerInfo(
display_name="Cours d'eau (DGE)",
layer_name="112_DWH_Gesreau",
opacity=0.5,
control_layer=True,
),
LayerInfo(
display_name="Traversé de localité (DGMR)",
layer_name="112_DWH_TraverseLocalite",
control_layer=True,
),
LayerInfo(
display_name="Axes de maintenance du réseau routier (DGMR)",
layer_name="112_dwh_axe",
),
LayerInfo(
display_name="Réseaux dans la couverture du sol",
layer_name="002_ITF_CS_Surface_CS",
sql_request='"vd_genre" IN ("eau.cours_eau", "revetement_dur.route_chemin","revetement_dur.chemin_de_fer")',
opacity=0.5,
control_layer=True,
),
LayerInfo(
display_name="Réseaux dans les objet divers linéaires",
layer_name="003_ITF_OD_Element_lineaire",
sql_request='"vd_genre" IN ("eau_canalisee_souterraine","tunnel_passage_inferieur_galerie","pont_passerelle","quai","ru","sentier","ligne_aerienne_a_haute_tension","mat_antenne","conduite_forcee","voie_ferree,telepherique","telecabine_telesiege","telepherique_de_chantier","skilift","bac","axe")',
control_layer=True,
),
LayerInfo(
display_name="Réseaux dans les objet divers surfaciques",
layer_name="003_ITF_OD_Element_surfacique",
sql_request='"vd_genre" IN ("eau_canalisee_souterraine","tunnel_passage_inferieur_galerie","pont_passerelle","quai","ru","sentier","ligne_aerienne_a_haute_tension","mat_antenne","conduite_forcee","voie_ferree,telepherique","telecabine_telesiege","telepherique_de_chantier","skilift","bac","axe")',
control_layer=True,
),
LayerInfo(
display_name="DP",
layer_name="006_ITF_BF_Bien_fonds",
sql_request='"number" LIKE "DP%"',
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifPoints_fixes": [
LayerInfo(
display_name="Point fixes dont les attributs ITF vs BDMO ne sont pas identiques",
layer_name="115_itf_pfp_problemeattribut",
opacity=0.5,
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Points fixes en BDMO mais pas dans le fichier ITF",
layer_name="115_bdmo_pfp_en_plus",
control_layer=True,
),
LayerInfo(
display_name="Points fixes dans le fichier ITF mais pas en BDMO",
layer_name="115_itf_pfp_en_plus",
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Points fixes dans le fichier ITF mais en dehors du lot",
layer_name="115_itf_pfp_horslot",
control_layer=True,
),
LayerInfo(
display_name="Précision planimétrique des points fixes",
layer_name="115_itf_pfp",
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifLimites_territoriales_et_administratives": [
LayerInfo(
display_name="Géometrie de limite de canton incorrecte (OGC)",
layer_name="116_LigneCANT_OGC_fail",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="Géometrie de limite de commune incorrecte (OGC)",
layer_name="116_LigneCOM_OGC_fail",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="Géometrie de limite de district incorrecte (OGC)",
layer_name="116_LigneDIST_OGC_fail",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="point de limite territoriale manquant sur sommet de limite de commune",
layer_name="116_PL_terr_manquant_sous_sommet_COM",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="sommet de limite de commune manquant sous point de limite territoriale",
layer_name="116_Sommet_COM_manquant_sous_PL_terr",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 0, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="sommet de limite de commune manquant sous sommet de limite de canton",
layer_name="116_sommetCOM_manquant_sous_sommet_CANT",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="sommet de limite decommune manquant sous sommet de limite de district",
layer_name="116_sommetCOM_manquant_sous_sommet_Dist",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="point divergent entre les immeubles et la limite de commune",
layer_name="103_VERIF_BF_COM_Point",
symbology_type=SymbologyType.SIMPLE,
symbology_properties={"color": "255, 255, 0", "size": "2"},
control_layer=True,
),
LayerInfo(
display_name="Sifflet entre les immeubles et la limite de commune",
layer_name="103_VERIF_BF_COM_Surface",
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
"VerifNumerotation": [
LayerInfo(
display_name="Flèche du doublon de numéro",
layer_name="123_verif_numdoublonfleche",
control_layer=True,
),
LayerInfo(
display_name="Doublon de numéro",
layer_name="123_verif_numdoublon",
control_layer=True,
),
LayerInfo(
display_name="Numéros d'immeuble différents entre la BDMO et ITF",
layer_name="123_verif_num_imm_diff_dwh_itf",
control_layer=True,
),
LayerInfo(
display_name="IdentDN sur le mauvais plan - ITF",
layer_name="105_verif_point_sur_mauvais_plan",
control_layer=True,
),
LayerInfo(
display_name="Numéro ECA * unique",
layer_name="123_verif_numetoile_unique",
control_layer=True,
),
LayerInfo(
display_name="IdentDN sur la mauvaise commune - ITF",
layer_name="123_verif_numcom_identdn",
control_layer=True,
),
LayerInfo(
display_name="Nom incorrect",
layer_name="130_od_cs_bat_no_verif_nom",
control_layer=True,
),
LayerInfo(
display_name="Bâtiment hors lot, sur la commune - BDMO",
layer_name="123_verif_num_baths_dwh",
),
LayerInfo(
display_name="Bâtment soutterrain hors lot, sur la commune - BDMO",
layer_name="123_verif_num_batss_dwh",
),
LayerInfo(
display_name="Parcelles hors lot, sur la commune - BDMO",
layer_name="123_verif_num_bf_dwh",
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
LayerInfo(
display_name="Plans alentours",
layer_name="111_bdmo_repartition_plans_alentours",
),
],
"VerifGeometrie": [
LayerInfo(
display_name="Test OGC",
layer_name="119_verif_geomogc",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="topic",
symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(3)},
opacity=0.5,
visibility=True,
control_layer=True,
),
LayerInfo(
display_name="Arc dont la flèche est inférieure à 3.5cm",
layer_name="106_verif_segmentarc",
sql_request='"fleche" < 0.035',
visibility=True,
control_layer=True,
),
LayerInfo(
display_name="Arc de cercle",
layer_name="106_verif_segmentarc",
visibility=False,
),
LayerInfo(
display_name="Position de label à vérifier",
layer_name="125_verif_poscroisementvecteur",
visibility=True,
control_layer=True,
),
LayerInfo(
display_name="Traits de rappel",
layer_name="125_verif_pos",
visibility=False,
),
LayerInfo(
display_name="Géométrie superposée dans l'ITF",
layer_name="119_verif_geomsuperpose",
control_layer=True,
),
LayerInfo(
display_name="Coordonnée identique entre BDMO et ITF",
layer_name="124_verif_coord_coordidentiqueitf_dwh",
control_layer=True,
),
LayerInfo(
display_name="Vecteurs BDMO",
layer_name="124_verif_coord_vecteur_dwh",
control_layer=True,
),
LayerInfo(
display_name="Objet ponctuel hors du périmètre du lot",
layer_name="112_itf_objet_hors_perimetrelot_point",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="nomtable",
symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(3)},
opacity=0.5,
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Objet linéaire hors du périmètre du lot",
layer_name="112_itf_objet_hors_perimetrelot_ligne",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="nomtable",
symbology_data_defined_properties={QgsSymbolLayer.PropertyWidth: QgsProperty.fromValue(2)},
opacity=0.5,
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Objet surfacique hors du périmètre du lot",
layer_name="112_itf_objet_hors_perimetre_surface",
symbology_type=SymbologyType.RANDOM_CATEGORIZED,
category_field="nomtable",
symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(2)},
opacity=0.5,
visibility=False,
control_layer=True,
),
LayerInfo(
display_name="Différence entre le contour des biens-fonds et le périmetre du lot",
layer_name="112_itf_diff_mise_a_jourrp_perimbf",
control_layer=True,
),
LayerInfo(display_name="Périmetre du lot", layer_name="112_itf_mise_a_jourrp"),
],
}
# Prepend the four generic "justificatifs" layers to every verification theme.
# The SQL filter restricts each justificatif layer to the layer names that
# belong to the theme being decorated.
for theme_name in list(VERIF_LAYER_INFOS):
    theme_infos = VERIF_LAYER_INFOS[theme_name]
    quoted_layer_names = ", ".join(f"'{info.layer_name.lower()}'" for info in theme_infos)
    justificatif_filter = f'lower("layer") in ({quoted_layer_names})'
    justificatif_infos = [
        LayerInfo(
            display_name="Justificatifs - point",
            layer_name="justificatif_point",
            sql_request=justificatif_filter,
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - ligne",
            layer_name="justificatif_line",
            sql_request=justificatif_filter,
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - surface",
            layer_name="justificatif_polygon",
            sql_request=justificatif_filter,
            symbology_type=SymbologyType.QML,
        ),
        LayerInfo(
            display_name="Justificatifs - sans géométrie",
            layer_name="justificatif_nogeometry",
            sql_request=justificatif_filter,
            symbology_type=SymbologyType.NO_SYMBOL,
        ),
    ]
    VERIF_LAYER_INFOS[theme_name] = justificatif_infos + theme_infos
def create_veri_meta_layers():
    """Build the fixed, ordered tuple of verification meta layers, one per theme."""
    themes = (
        ("VerifAdresses", "Vérification - Adresses"),
        ("VerifBiens_fonds", "Vérification - Biens fonds"),
        ("VerifCouverture_du_sol", "Vérification - Couverture du sol"),
        ("VerifObjets_divers", "Vérification - Objets divers"),
        ("VerifContinuite_des_reseaux", "Vérification - Continuite des reseaux"),
        ("VerifGeometrie", "Vérification - Géométries"),
        ("VerifNomenclature", "Vérification - Nomenclature"),
        ("VerifNumerotation", "Vérification - Nom & Numérotation"),
        ("VerifLimites_territoriales_et_administratives", "Vérification - Limites territoriales et administratives"),
        ("VerifPoints_fixes", "Vérification - Points fixes"),
        ("VerifCalage", "Vérification - Calage des immeubles"),
    )
    return tuple(VeriMetaLayer(name, label) for name, label in themes)
class VerifLayerModel(LayerListModel):
    """Layer list model for the in-house verification themes (VERIF_LAYER_INFOS)."""

    def __init__(self, iface: QgisInterface):
        # Verification themes expose control layers, hence has_control_layers=True.
        super().__init__(iface, create_veri_meta_layers(), has_control_layers=True)

    def reload(self):
        # NOTE(review): assigns ``veri_meta_layers`` (no leading underscore)
        # while the base class stores ``_veri_meta_layers`` — confirm a
        # property setter exists on LayerListModel, otherwise this reload
        # has no effect on the model's data.
        self.veri_meta_layers = create_veri_meta_layers()

    def layer_infos(self, layer: str) -> "list[LayerInfo]":
        """Return the LayerInfo list configured for the given theme name."""
        return VERIF_LAYER_INFOS[layer]
<file_sep>/verivd/core/models/ili_validator_model.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.core import QgsProperty, QgsSymbolLayer, QgsVectorLayer, QgsWkbTypes
from qgis.gui import QgisInterface
from verivd.core.gpkg_data import MARKER_SHAPE
from verivd.core.layer_info import LayerInfo
from verivd.core.layer_list_model import LayerListModel
from verivd.core.symbology_type import SymbologyType
from verivd.core.topic_layers import TOPIC_LAYERS
from verivd.core.veri_meta_layer import VeriMetaLayer
class IliValidatorLayerModel(LayerListModel):
    """Layer list model exposing the ilivalidator results of the geopackage.

    One meta layer is created per INTERLIS topic appearing in the
    ``000_iliValidator_decompte`` table, labelled with its error count.
    """

    def __init__(self, iface: QgisInterface):
        super().__init__(iface)

    def reload(self):
        """Rebuild the meta layer list from the geopackage content."""
        self.beginResetModel()
        self._veri_meta_layers = []
        if not self.gpkg_data:
            # Bug fix: the early return used to leave the model reset open
            # (beginResetModel without a matching endResetModel).
            self.endResetModel()
            return
        ili_validator_dict = self.gpkg_data.load_table_list("000_iliValidator_decompte")
        for topic in TOPIC_LAYERS:
            # Table keys use underscores where topic names use spaces.
            ili_validator_topic = topic.replace(" ", "_")
            if ili_validator_topic in ili_validator_dict:
                display_name = f"IliValidator - {topic}: {ili_validator_dict[ili_validator_topic]}"
                self._veri_meta_layers.append(VeriMetaLayer(ili_validator_topic, display_name))
        self.endResetModel()

    def group_name(self, layer):
        """Name of the legend group created for ``layer``."""
        return f"Résultat du iliValidator - {layer}"

    def layer_infos(self, layer: str):
        """Return the LayerInfo tuple for one ilivalidator topic.

        Every layer is filtered on the topic via ``sql_request``; the four
        "justificatifs" layers come first, then the ilivalidator result layers
        by geometry type, then the lot perimeter.
        """
        sql_request = f"\"topic\" = '{layer}'"
        layer_infos = (
            LayerInfo(
                display_name=f"Justificatifs - {layer} point",
                layer_name="justificatif_point",
                sql_request=f"\"layer\" = '000_ilivalidator_point' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} ligne",
                layer_name="justificatif_line",
                # les lignes sont dans la couche _surface
                sql_request=f"\"layer\" = '000_ilivalidator_surface' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} surface",
                layer_name="justificatif_polygon",
                sql_request=f"\"layer\" = '000_ilivalidator_surface' AND {sql_request}",
                symbology_type=SymbologyType.QML,
            ),
            LayerInfo(
                display_name=f"Justificatifs - {layer} sans géométrie",
                layer_name="justificatif_nogeometry",
                sql_request=f"\"layer\" = '000_ilivalidator_sans_geometrie' AND {sql_request}",
                symbology_type=SymbologyType.NO_SYMBOL,
            ),
            LayerInfo(
                display_name=f"iliValidator - {layer} point",
                layer_name="000_iliValidator_Point",
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="observation",
                symbology_data_defined_properties={QgsSymbolLayer.PropertySize: QgsProperty.fromValue(5)},
                sql_request=sql_request,
            ),
            LayerInfo(
                display_name=f"iliValidator - {layer} ligne",
                layer_name="000_iliValidator_Ligne",
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="observation",
                symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(2)},
                sql_request=sql_request,
                opacity=0.5,
            ),
            LayerInfo(
                display_name=f"iliValidator - {layer} Arc",
                layer_name="000_iliValidator_Point_Arc",
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="observation",
                symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(2)},
                sql_request=sql_request,
                opacity=0.5,
            ),
            LayerInfo(
                display_name=f"iliValidator - {layer} surface",
                layer_name="000_iliValidator_Surface",
                symbology_type=SymbologyType.RANDOM_CATEGORIZED,
                category_field="observation",
                symbology_data_defined_properties={QgsSymbolLayer.PropertyStrokeWidth: QgsProperty.fromValue(2)},
                sql_request=sql_request,
                opacity=0.5,
            ),
            LayerInfo(
                display_name=f"iliValidator - {layer} sans géométrie",
                layer_name="000_iliValidator_Sans_geometrie",
                symbology_type=SymbologyType.NO_SYMBOL,
                sql_request=sql_request,
            ),
            LayerInfo(
                display_name="Périmetre du lot",
                layer_name="112_itf_mise_a_jourrp",
            ),
        )
        return layer_infos

    def post_process_layer(self, layer: QgsVectorLayer, position: int):
        """Give each point layer a distinctive marker shape based on its position."""
        if layer.geometryType() == QgsWkbTypes.PointGeometry:
            for symbol in layer.renderer().symbols(self.layer_context(layer)):
                # NOTE(review): the modulo uses len(MARKER_SHAPE) - 1, so the
                # last entry of MARKER_SHAPE is never used — confirm intentional.
                symbol.symbolLayer(0).setShape(MARKER_SHAPE[position % (len(MARKER_SHAPE) - 1)])
<file_sep>/verivd/gui/help.py
#!/usr/bin/env python
# URLs of the external reference pages linked from the help messages.
WEB_GEOPACKAGE = "https://www.geopackage.org/"
WEB_MOPUBLIC = "https://www.cadastre.ch/fr/services/service/mopublic.html"
WEB_NORMEVD = "https://dwa.vd.ch/PROD/DINF/publicationdinf1_p.nsf/normesformulaires?SearchView&Query="
# Query template appended to WEB_NORMEVD to look up a norm by its title.
REQUETE = "(FIELD%20TitreNorme%20CONTAINS%20{})"
WEB_CHECKER = "https://www.vd.ch/themes/territoire-et-construction/informations-sur-le-territoire/mensuration-officielle/informations-aux-bureaux-de-geometres/checker-interlis/"
WEB_ILIVALIDATOR = "https://www.interlis.ch/fr/downloads/ilivalidator"
WEB_6052 = "https://www.vd.ch/fileadmin/user_upload/organisation/dinf/sit/fichiers_pdf/VeriVD/6052_VeriVD_infoV2_projet_20210423.pdf"
# User-facing help texts. Typo fixes vs. the previous revision:
# "generée" -> "généré", "simplifé" -> "simplifié", "issues" -> "issus",
# "éé ajoutés" -> "été ajoutés", "veuiller" -> "veuillez",
# "dévelopés" -> "développés", "disponnibles" -> "disponibles".
MESSAGE_FICHIER = f"""Le fichier .gpkg qui alimente ce plugin, contient une base de données spatiale au format <a href='{WEB_GEOPACKAGE}'>geopackage</a>.
Ce fichier est généré à la DCG sur la base d'un fichier interlis décrit selon le modèle MD01MOVDMN95."""
MESSAGE_BASE = """Cet onglet permet le chargement des couches de bases. Celles-ci sont inspirées du modèle simplifié
de la mensuration officielle: <a href='{}'>MO-Public</a>. Quelques éléments importants issus du <a href='{}'>modèle
officiel</a> ont été ajoutés à ces données avec un 'préfixe VD'. La symbologie provient de la <a href='{}'>norme
vaudoise 6411</a>""".format(
    WEB_MOPUBLIC,
    WEB_NORMEVD + REQUETE.format("6021"),
    WEB_NORMEVD + REQUETE.format("6411"),
)
MESSAGE_ILIVALIDATOR = f"""Cet onglet permet le chargement des données issues de <a href='{WEB_ILIVALIDATOR}'>l'ilivalidator interlis
</a>. Pour plus d'informations, veuillez consulter la page web s'y référant. """
MESSAGE_CHECKER = f"""Cet onglet permet le chargement des données issues du <a href='{WEB_CHECKER}'>checker interlis
</a>. Pour plus d'informations, veuillez consulter la page web s'y référant. """
MESSAGE_VERIF = f"""Cet onglet permet le chargement des données des tests de vérification développés en interne à
la DCG (direction du cadastre et de la géoinformation). Des explications relatives à chacun de ces tests sont disponibles
dans la fiche <a href='{WEB_6052}'>6052</a>. """
<file_sep>/verivd/core/veri_meta_layer.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.core import QgsLayerTreeGroup, QgsVectorLayer
from qgis.PyQt.QtCore import Qt
class VeriMetaLayer:
    """
    This is the base class for the data in layer list models.
    This holds information about loaded QGIS layers and layer tree information.
    1 VeriMetaLayer loads several QGIS layers.
    """

    def __init__(self, name: str, display_name: str = None):
        # Internal identifier (key into the layer-info dictionaries).
        self._name = name
        # Human-readable label; falls back to the identifier.
        self._display_name = display_name or name
        # Tri-state load status: Qt.Unchecked / Qt.PartiallyChecked / Qt.Checked.
        self._loaded = Qt.Unchecked
        # Custom-property value tagging the layer tree group; None when unloaded.
        self._layer_group_id = None
        # QGIS layers currently loaded for this meta layer.
        self._qgis_layers: [QgsVectorLayer] = []

    @property
    def name(self):
        """Internal identifier of the meta layer."""
        return self._name

    @property
    def display_name(self):
        """Label shown in the layer list views."""
        return self._display_name

    @property
    def loaded(self):
        """Load status as a Qt check state (tri-state, not a plain bool)."""
        return self._loaded

    @loaded.setter
    def loaded(self, value: Qt.CheckState):
        # Annotation corrected: callers pass Qt.Unchecked/PartiallyChecked/Checked.
        self._loaded = value

    @property
    def layer_group_id(self):
        """Identifier stored on the layer tree group's custom property."""
        return self._layer_group_id

    @layer_group_id.setter
    def layer_group_id(self, value: str):
        # Annotation corrected: callers store the meta layer *name* (a str),
        # not a QgsLayerTreeGroup instance.
        self._layer_group_id = value

    @property
    def qgis_layers(self):
        """List of QGIS layers currently loaded for this meta layer."""
        return self._qgis_layers

    @qgis_layers.setter
    def qgis_layers(self, value: [QgsVectorLayer]):
        self._qgis_layers = value
<file_sep>/verivd/core/topic_layers.py
"""
/***************************************************************************
VeriVD plugin
Copyright (C) 2019 <NAME>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# REMEMBER TO SORT WHEN EDITING
# Names of the cadastral topics whose layers can be loaded.
# Fix: a missing comma between "Bords de plan" and "Conduites" caused implicit
# string concatenation, producing a single bogus entry "Bords de planConduites"
# and silently dropping both real topics.
TOPIC_LAYERS = (
    "Adresses des batiments",
    "Altimetrie",
    "Biens fonds",
    "Bords de plan",
    "Conduites",
    "Couverture du sol",
    "Domaines numerotation",
    "Limites canton",
    "Limites commune",
    "Limites district",
    "Nomenclature",
    "NPA Localite",
    "Objets divers",
    "Points fixesCategorie1",
    "Points fixesCategorie2",
    "Points fixesCategorie3",
    "RepartitionNT",
    "Repartitions plans",
    "Zones glissement",
)
|
1d084aab49e9e5aa028b5accbbfb82ac463c8408
|
[
"Markdown",
"SQL",
"Python"
] | 19 |
Python
|
VeriVD/qgis_VeriVD
|
2cec5a5aca5c11b56fe5f50f3126b0be52843941
|
7b62fe6afcce18fae178832791dcb676bb5987ed
|
refs/heads/master
|
<repo_name>CalvinMogodi/OUTsuranceAssessment<file_sep>/OUTsuranceAssessment/FileManager.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OUTsuranceAssessment
{
public class FileManager
{
    #region Members

    #region Constructors
    /// <summary>
    /// Initializes the instance
    /// </summary>
    public FileManager() {
    }
    #endregion

    #region Methods
    /// <summary>
    /// Reads the input CSV (skipping the header row), then writes two text files:
    /// one with "name, frequency" pairs ordered by frequency descending then name
    /// ascending, and one with the addresses sorted alphabetically by street name.
    /// </summary>
    /// <returns>true when both output files were written successfully; otherwise false</returns>
    public bool ReadCSVAndWriteTextFile()
    {
        bool isComplete = false;
        string filePath = @"C:\Users\Admin\Downloads\data.csv";
        try
        {
            // Read from a csv file and skip header
            using (var reader = new StreamReader(filePath))
            {
                List<string> firstList = new List<string>();
                List<string> secondList = new List<string>();
                bool isHeader = true;
                while (!reader.EndOfStream)
                {
                    var line = reader.ReadLine();
                    if (isHeader)
                    {
                        // Skip the header row exactly once.
                        isHeader = false;
                        continue;
                    }
                    // Skip blank lines (e.g. a trailing newline) and malformed rows;
                    // these previously threw IndexOutOfRangeException and aborted the run.
                    if (string.IsNullOrWhiteSpace(line))
                        continue;
                    var values = line.Split(',');
                    if (values.Length < 3)
                        continue;
                    firstList.Add(values[0]);   // first name
                    firstList.Add(values[1]);   // surname
                    secondList.Add(values[2]);  // address
                }
                // Order names by frequency descending and then alphabetically ascending.
                List<string> orderedNames = firstList.GroupBy(s => s).Select(g => new { Name = g.Key, Frequency = g.Count() }).OrderByDescending(c => c.Frequency).ThenBy(c => c.Name).Select(c => (string)string.Format("{0}, {1}", c.Name, c.Frequency.ToString())).ToList();
                // Write to the first file
                string firstFilePath = @"C:\Users\Admin\Downloads\First File.txt";
                bool firstWritten = WriteToTextFile(firstFilePath, orderedNames);
                // Order the addresses and write to the second file
                string secondFilePath = @"C:\Users\Admin\Downloads\Second File.txt";
                List<string> orderedAddresses = SortListAlphabetically(secondList);
                bool secondWritten = WriteToTextFile(secondFilePath, orderedAddresses);
                // Only report success when both files were actually written
                // (the original ignored both write results and always returned true).
                isComplete = firstWritten && secondWritten;
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("Error occurred: " + ex.Message);
        }
        return isComplete;
    }

    /// <summary>
    /// Write data to a text file
    /// </summary>
    /// <param name="filePathAndName">destination file path</param>
    /// <param name="lines">lines to write</param>
    /// <returns>pass or fail</returns>
    public bool WriteToTextFile(string filePathAndName, List<string> lines)
    {
        try
        {
            File.WriteAllLines(filePathAndName, lines);
            return true;
        }
        catch (Exception ex)
        {
            // Report the failure instead of silently swallowing it; the original
            // also had an unreachable `throw` after `return isDone`, removed here.
            Console.WriteLine("Error occurred while writing '" + filePathAndName + "': " + ex.Message);
            return false;
        }
    }

    /// <summary>
    /// Sort the list alphabetically by the text starting at the first letter of
    /// each entry (so leading street numbers are ignored). Entries containing no
    /// letter at all are dropped, as in the original implementation. This will
    /// affect the performance if the list is longer.
    /// </summary>
    /// <param name="unSortedList">list to sort</param>
    /// <returns>a new list with the entries in alphabetical street-name order</returns>
    public List<string> SortListAlphabetically(List<string> unSortedList) {
        List<SortItem> listToSort = new List<SortItem>();
        List<string> sortedList = new List<string>();
        for (int i = 0; i < unSortedList.Count; i++)
        {
            string item = unSortedList[i];
            foreach (char character in item)
            {
                if (Char.IsLetter(character))
                {
                    // Sort key: the substring starting at the first letter.
                    var result = item.Substring(item.IndexOf(character));
                    // Use the positional index i rather than IndexOf(item): IndexOf
                    // returns the FIRST occurrence, which duplicated/dropped entries
                    // whenever the input list contained duplicate strings.
                    SortItem Sort = new SortItem() { Character = result, Index = i };
                    listToSort.Add(Sort);
                    break;
                }
            }
        }
        var addresses = listToSort.OrderBy(a => a.Character);
        // Recover the original strings by their recorded positions.
        foreach (var item in addresses)
        {
            sortedList.Add(unSortedList[item.Index]);
        }
        return sortedList;
    }
    #endregion

    #endregion
}
}
<file_sep>/README.md
# OUTsuranceAssessment
IDE - Visual Studio 2015
Project Type - Console Application
Please note
The file to read from is - C:\Users\Admin\Downloads\data.csv
First output file is - C:\Users\Admin\Downloads\First File.txt
Second output file is - C:\Users\Admin\Downloads\Second File.txt
<file_sep>/OUTsuranceAssessment/Program.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace OUTsuranceAssessment
{
class Program
{
    /// <summary>
    /// Console entry point: runs the CSV-to-text-file conversion and
    /// reports the outcome on standard output.
    /// </summary>
    static void Main(string[] args)
    {
        Console.WriteLine("Started");
        var fileManager = new FileManager();
        Console.WriteLine("Processing ...");
        // Report the result of the conversion in a single write.
        var outcome = fileManager.ReadCSVAndWriteTextFile() ? "Completed" : "Failed";
        Console.WriteLine(outcome);
    }
}
}
|
f32f3db8cf57ad308962e36b2833520d11107a78
|
[
"Markdown",
"C#"
] | 3 |
C#
|
CalvinMogodi/OUTsuranceAssessment
|
8ce710ac9798d22e49e24192db97f5bae36c035b
|
e991b1108786316f270e6054aad853cc96fbd29a
|
refs/heads/master
|
<file_sep>heywire-laravel
===============
Send message with HeyWire from your Laravel App.
-----------------------
Add to app/config/app.php
* Providers:
<pre><code>'Ielijose\HeywireLaravel\HeywireLaravelServiceProvider',</code></pre>
* Aliases:
<pre><code>'Heywire' => 'Ielijose\HeywireLaravel\Heywire',</code></pre>
<file_sep><?php namespace Ielijose\HeywireLaravel;
use Illuminate\Support\ServiceProvider;
class HeywireLaravelServiceProvider extends ServiceProvider {

    /**
     * Indicates if loading of the provider is deferred.
     *
     * @var bool
     */
    protected $defer = false;

    /**
     * Register the service provider.
     *
     * Binds a shared 'heywire' singleton into the IoC container and
     * registers the 'Heywire' class alias once the application is booting.
     *
     * @return void
     */
    public function register()
    {
        // Shared binding: the same Heywire instance is returned on every resolution.
        $this->app['heywire'] = $this->app->share(function($app)
        {
            return new Heywire;
        });

        // Defer alias registration to the booting phase so the alias
        // loader singleton is fully initialized before we touch it.
        $this->app->booting(function()
        {
            $loader = \Illuminate\Foundation\AliasLoader::getInstance();
            $loader->alias('Heywire', 'Ielijose\HeywireLaravel\Facades\Heywire');
        });
    }

    /**
     * Get the services provided by the provider.
     *
     * @return array
     */
    public function provides()
    {
        return array('heywire');
    }

}
|
8b8d4dccf3c8adc40027a6332663f1ec13f4a9bb
|
[
"Markdown",
"PHP"
] | 2 |
Markdown
|
gerswin/heywire-laravel
|
e411ba9a3c0e82f9b9be010fdf3f5d4c76f33a8f
|
48e4ab258f6311ccc66bd78f88527e5850789777
|
refs/heads/master
|
<file_sep>// Shared fixtures for unit tests: canned user identities and an ES client stub.
const currentUser = {
  // Machine-to-machine (M2M) identity: zero UUID, no user scopes.
  userId: '00000000-0000-0000-0000-000000000000',
  isMachine: true,
  scopes: []
}
// Connect copilot user fixture.
const UserTCConnCopilot = {
  userId: '4709473d-f060-4102-87f8-4d51ff0b34c1',
  handle: 'TCConnCopilot'
}
// User granted the manage permission (only the flag is needed by the code under test).
const userWithManagePermission = {
  hasManagePermission: true
}
// Plain user with no special permissions.
const regularUser = {
  userId: '222'
}
// Empty Elasticsearch client stub; tests attach the methods they need.
const ESClient = {}
module.exports = {
  currentUser,
  UserTCConnCopilot,
  userWithManagePermission,
  regularUser,
  ESClient
}
<file_sep>/*
* Handle events for Team.
*/
const _ = require('lodash')
const config = require('config')
const logger = require('../common/logger')
const helper = require('../common/helper')
/**
 * Once we create a team, the notification emails to be sent out.
 *
 * Builds the "team created" email payload from the event data and publishes
 * it to the notifications Kafka topic; the notifications processor performs
 * the actual SendGrid delivery.
 *
 * @param {Object} payload the event payload (expects `project` with id/name/members and a `jobs` array)
 * @returns {undefined}
 */
async function sendNotificationEmail (payload) {
  // Template keyed by the notification type for "team created" emails.
  const template = helper.getEmailTemplatesForKey('notificationEmailTemplates')['taas.notification.team-created']
  const data = {
    subject: template.subject,
    teamName: payload.project.name,
    teamUrl: `${config.TAAS_APP_URL}/${payload.project.id}`,
    // One summary entry per job created together with the team.
    jobList: _.map(payload.jobs, j => ({
      title: j.title,
      duration: j.duration,
      // Dates are rendered in EDT for the email body.
      startDate: helper.formatDateEDT(j.startDate),
      jobUrl: `${config.TAAS_APP_URL}/${payload.project.id}/positions/${j.id}`
    })),
    notificationType: {
      newTeamCreated: true
    },
    description: 'New Team Created'
  }
  // Resolve {{placeholders}} in the subject using the data object itself.
  data.subject = helper.substituteStringByObject(data.subject, data)
  const emailData = {
    serviceId: 'email',
    type: 'taas.notification.team-created',
    details: {
      from: template.from,
      // Every project member receives the email; only userId is needed downstream.
      recipients: _.map(payload.project.members, m => _.pick(m, 'userId')),
      data,
      sendgridTemplateId: template.sendgridTemplateId,
      version: 'v3'
    }
  }
  await helper.postEvent(config.NOTIFICATIONS_CREATE_TOPIC, {
    notifications: [emailData]
  })
  logger.debug({
    component: 'TeamEventHandler',
    context: 'sendNotificationEmail',
    message: `project id: ${payload.project.id}, subject: ${data.subject}, created with jobs: ${_.join(_.map(payload.jobs, 'id'), ',')}`
  })
}
/**
 * Process team creating event.
 *
 * Currently the only side effect of a team-created event is the
 * notification email; failures propagate to the event-handler caller.
 *
 * @param {Object} payload the event payload
 * @returns {undefined}
 */
async function processCreate (payload) {
  await sendNotificationEmail(payload)
}

module.exports = {
  processCreate
}
<file_sep>const { Sequelize, Model } = require('sequelize')
const config = require('config')
const errors = require('../common/errors')
// Sequelize model factory for the `job_candidates` table.
// The table is "paranoid": rows are soft-deleted via the deleted_at column.
module.exports = (sequelize) => {
  class JobCandidate extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      JobCandidate._models = models
      // Each candidate belongs to exactly one Job ...
      JobCandidate.belongsTo(models.Job, { foreignKey: 'jobId' })
      // ... and may have several interviews scheduled.
      JobCandidate.hasMany(models.Interview, { foreignKey: 'jobCandidateId', as: 'interviews' })
    }

    /**
     * Get job candidate by id
     * @param {String} id the job candidate id
     * @param {Array} include include options
     * @returns {JobCandidate} the JobCandidate instance
     * @throws {errors.NotFoundError} when no candidate with the given id exists
     */
    static async findById (id, include = []) {
      const jobCandidate = await JobCandidate.findOne({
        where: {
          id
        },
        include
      })
      if (!jobCandidate) {
        throw new errors.NotFoundError(`id: ${id} "JobCandidate" doesn't exists.`)
      }
      return jobCandidate
    }
  }

  // Column definitions; `field` maps camelCase attributes to snake_case columns.
  JobCandidate.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      jobId: {
        field: 'job_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      userId: {
        field: 'user_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      viewedByCustomer: {
        field: 'viewed_by_customer',
        type: Sequelize.BOOLEAN,
        defaultValue: false,
        allowNull: false
      },
      status: {
        type: Sequelize.STRING(255),
        allowNull: false
      },
      externalId: {
        field: 'external_id',
        type: Sequelize.STRING(255)
      },
      resume: {
        type: Sequelize.STRING(2048)
      },
      remark: {
        type: Sequelize.STRING(255)
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'job_candidates',
      // Soft delete: destroy() sets deletedAt instead of removing the row.
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      // Hide the soft-delete marker from normal queries.
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // Strip deletedAt from the instance returned right after creation,
        // matching the defaultScope behavior.
        afterCreate: (jobCandidate) => {
          delete jobCandidate.dataValues.deletedAt
        },
        afterDestroy: async (jobCandidate) => {
          // cascade (soft) delete interviews associated with this jobCandidate
          const jobCandidateId = jobCandidate.id
          await sequelize.models.Interview.destroy({
            where: { jobCandidateId }
          })
        }
      }
    }
  )

  return JobCandidate
}
<file_sep>/*
* Configure email templates.
* Variables can be used inside the subject and the message of a template(enclosed in double curly braces).
*/
const config = require('config')
// Template registry consumed via helper.getEmailTemplatesForKey(<top-level key>).
// Each entry carries subject/body (may be empty when the SendGrid template owns
// the copy), default recipients, the sender, and the SendGrid template id.
module.exports = {
  /**
   * List all the kind of emails which could be sent by the endpoint `POST /taas-teams/email` inside `teamTemplates`.
   */
  teamTemplates: {
    /* Report a general issue for a team.
     *
     * - projectId: the project ID. Example: 123412
     * - projectName: the project name. Example: "TaaS API Misc Updates"
     * - reportText: the body of reported issue. Example: "I have issue with ... \n ... Thank you in advance!"
     */
    'team-issue-report': {
      subject: 'Issue Reported on TaaS Team {{projectName}} ({{projectId}}).',
      body: 'Project Name: {{projectName}}' + '\n' +
        'Project ID: {{projectId}}' + '\n' +
        `Project URL: ${config.TAAS_APP_URL}/{{projectId}}` + '\n' +
        '\n' +
        '{{reportText}}',
      recipients: config.REPORT_ISSUE_EMAILS,
      sendgridTemplateId: config.REPORT_ISSUE_SENDGRID_TEMPLATE_ID
    },
    /* Report issue for a particular member
     *
     * - userHandle: the user handle. Example: "bili_2021"
     * - projectId: the project ID. Example: 123412
     * - projectName: the project name. Example: "TaaS API Misc Updates"
     * - reportText: the body of reported issue. Example: "I have issue with ... \n ... Thank you in advance!"
     */
    'member-issue-report': {
      subject: 'Issue Reported for member {{userHandle}} on TaaS Team {{projectName}} ({{projectId}}).',
      body: 'User Handle: {{userHandle}}' + '\n' +
        'Project Name: {{projectName}}' + '\n' +
        'Project ID: {{projectId}}' + '\n' +
        `Project URL: ${config.TAAS_APP_URL}/{{projectId}}` + '\n' +
        '\n' +
        '{{reportText}}',
      recipients: config.REPORT_ISSUE_EMAILS,
      sendgridTemplateId: config.REPORT_ISSUE_SENDGRID_TEMPLATE_ID
    },
    /* Request extension for a particular member
     *
     * - userHandle: the user handle. Example: "bili_2021"
     * - projectId: the project ID. Example: 123412
     * - projectName: the project name. Example: "TaaS API Misc Updates"
     * - text: comment for the request. Example: "I would like to keep working with this member for 2 months..."
     */
    'extension-request': {
      subject: 'Extension Requested for member {{userHandle}} on TaaS Team {{projectName}} ({{projectId}}).',
      body: 'User Handle: {{userHandle}}' + '\n' +
        'Project Name: {{projectName}}' + '\n' +
        'Project ID: {{projectId}}' + '\n' +
        `Project URL: ${config.TAAS_APP_URL}/{{projectId}}` + '\n' +
        '\n' +
        '{{text}}',
      recipients: config.REPORT_ISSUE_EMAILS,
      sendgridTemplateId: config.REQUEST_EXTENSION_SENDGRID_TEMPLATE_ID
    },
    /* Request interview for a job candidate
     *
     * - interviewType: the x.ai interview type. Example: "interview-30"
     * - interviewRound: the round of the interview. Example: 2
     * - interviewDuration: duration of the interview, in minutes. Example: 30
     * - interviewerList: The list of interviewer email addresses. Example: "<EMAIL>, <EMAIL>"
     * - candidateId: the id of the jobCandidate. Example: "cc562545-7b75-48bf-87e7-50b3c57e41b1"
     * - candidateName: Full name of candidate. Example: "<NAME>"
     * - jobName: The title of the job. Example: "TaaS API Misc Updates"
     *
     * Template (defined in SendGrid):
     * Subject: '{{interviewType}} tech interview with {{candidateName}} for {{jobName}} is requested by the Customer'
     * Body:
     * 'Hello!
     * <br /><br />
     * Congratulations, you have been selected to participate in a Topcoder Gig Work Interview!
     * <br /><br />
     * Please monitor your email for a response to this where you can coordinate your availability.
     * <br /><br />
     * Interviewee: {{candidateName}}<br />
     * Interviewer(s): {{interviewerList}}<br />
     * Interview Length: {{interviewDuration}} minutes
     * <br /><br />
     * /{{interviewType}}
     * <br /><br />
     * Topcoder Info:<br />
     * Note: "id: {{candidateId}}, round: {{interviewRound}}"'
     *
     * Note, that the template should be defined in SendGrid.
     * The subject & body above (identical to actual SendGrid template) is for reference purposes.
     * We won't pass subject & body but only substitutions (replacements in template subject/body).
     */
    'interview-invitation': {
      // Subject/body intentionally empty: the SendGrid template owns the copy.
      subject: '',
      body: '',
      from: config.INTERVIEW_INVITATION_SENDER_EMAIL,
      cc: config.INTERVIEW_INVITATION_CC_LIST,
      recipients: config.INTERVIEW_INVITATION_RECIPIENTS_LIST,
      sendgridTemplateId: config.INTERVIEW_INVITATION_SENDGRID_TEMPLATE_ID
    }
  },

  /**
   * List all kind of emails which could be send as Email Notifications by scheduler, API endpoints or anything else.
   *
   * Keys match the notification `type` published to the notifications topic.
   * Empty `recipients` means the sender code supplies them per notification.
   */
  notificationEmailTemplates: {
    'taas.notification.job-candidate-resume-viewed': {
      subject: 'Topcoder - Client View Resume for Job {{jobName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.candidates-available-for-review': {
      subject: 'Topcoder - {{teamName}} has job candidates available for review',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.interview-coming-up-host': {
      subject: 'Topcoder - Interview Coming Up: {{jobTitle}} with {{guestFullName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.interview-coming-up-guest': {
      subject: 'Topcoder - Interview Coming Up: {{jobTitle}} with {{hostFullName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.interview-awaits-resolution': {
      subject: 'Topcoder - Interview Awaits Resolution: {{jobTitle}} for {{guestFullName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.post-interview-action-required': {
      subject: 'Topcoder - Candidate Action Required in {{teamName}} for {{numCandidates}} candidates',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.resource-booking-expiration': {
      subject: 'Topcoder - Resource Booking Expiring in {{teamName}} for {{numResourceBookings}} resource bookings',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.team-created': {
      subject: 'Topcoder - New Team {{teamName}} Created',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.job-created': {
      subject: 'Topcoder - New Job {{jobTitle}} Created in Team {{teamName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.interviews-overlapping': {
      subject: 'Topcoder - Interviews overlapping',
      body: '',
      // Operational alert: goes to the ops mailing list, not project members.
      recipients: config.NOTIFICATION_OPS_EMAILS,
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.job-candidate-selected': {
      subject: 'Topcoder - Job Candidate {{userHandle}} Selected for {{jobTitle}} in Team {{teamName}}',
      body: '',
      recipients: config.NOTIFICATION_OPS_EMAILS,
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    },
    'taas.notification.resource-booking-placed': {
      subject: 'Topcoder - Resource Booking {{userHandle}} Placed for Job {{jobTitle}} in Team {{teamName}}',
      body: '',
      recipients: [],
      from: config.NOTIFICATION_SENDER_EMAIL,
      sendgridTemplateId: config.NOTIFICATION_SENDGRID_TEMPLATE_ID
    }
  }
}
<file_sep>const _ = require('lodash')
const config = require('config')
const moment = require('moment')
const models = require('../models')
const { getMemberDetailsByHandle, getChallenge, getChallengeResource, sleep, postEvent, postErrorEvent } = require('../common/helper')
const logger = require('../common/logger')
const { createChallenge, addResourceToChallenge, activateChallenge, closeChallenge } = require('./PaymentService')
const { ChallengeStatus, PaymentSchedulerStatus, PaymentProcessingSwitch } = require('../../app-constants')
const {
processUpdate
} = require('../esProcessors/WorkPeriodPaymentProcessor')
const sequelize = models.sequelize
const WorkPeriodPayment = models.WorkPeriodPayment
const WorkPeriod = models.WorkPeriod
const PaymentScheduler = models.PaymentScheduler
const {
SWITCH, BATCH_SIZE, IN_PROGRESS_EXPIRED, MAX_RETRY_COUNT, RETRY_BASE_DELAY, RETRY_MAX_DELAY, PER_REQUEST_MAX_TIME, PER_PAYMENT_MAX_TIME,
PER_MINUTE_PAYMENT_MAX_COUNT, PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT, PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT,
FIX_DELAY_STEP_CREATE_CHALLENGE, FIX_DELAY_STEP_ASSIGN_MEMBER, FIX_DELAY_STEP_ACTIVATE_CHALLENGE
} = config.PAYMENT_PROCESSING
// Module-level mutable state for per-minute rate limiting of payment processing.
const processStatus = {
  // Counters for the current wall-clock minute (keyed by `minute`).
  perMin: {
    minute: '0:0',
    paymentsProcessed: 0,
    challengeRequested: 0,
    resourceRequested: 0
  },
  // Per-minute ceilings, taken from config; when reached we sleep to the next minute.
  perMinThreshold: {
    paymentsProcessed: PER_MINUTE_PAYMENT_MAX_COUNT,
    challengeRequested: PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT,
    resourceRequested: PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT
  },
  // Timestamps (ms epoch) of the current payment / current request, set by callers.
  paymentStartTime: 0,
  requestStartTime: 0
}
// Outcome codes returned by processPayment.
const processResult = {
  SUCCESS: 'success',
  FAIL: 'fail',
  SKIP: 'skip'
}
// Thin wrappers that tag every log line with this service's component name.
const localLogger = {
  debug: (message, context) => logger.debug({ component: 'PaymentSchedulerService', context, message }),
  error: (message, context) => logger.error({ component: 'PaymentSchedulerService', context, message }),
  info: (message, context) => logger.info({ component: 'PaymentSchedulerService', context, message })
}
/**
 * Scheduler process entrance.
 *
 * Fetches one batch of Work Period Payments in "in-progress" or "scheduled"
 * status (in-progress first, oldest first), processes each sequentially,
 * and logs a summary of failures and skips.
 */
async function processScheduler () {
  // Get the oldest Work Periods Payment records in status "scheduled" and "in-progress",
  // the in progress state may be caused by an abnormal shutdown,
  // or it may be a normal record that is still being processed
  const workPeriodPaymentList = await WorkPeriodPayment.findAll({ where: { status: ['in-progress', 'scheduled'] }, order: [['status', 'desc'], ['createdAt']], limit: BATCH_SIZE })
  localLogger.info(`start processing ${workPeriodPaymentList.length} of payments`, 'processScheduler')
  const failIds = []
  const skipIds = []
  // Payments are processed one at a time so the per-minute throttles apply.
  for (const workPeriodPayment of workPeriodPaymentList) {
    const result = await processPayment(workPeriodPayment)
    if (result === processResult.FAIL) {
      failIds.push(workPeriodPayment.id)
    } else if (result === processResult.SKIP) {
      skipIds.push(workPeriodPayment.id)
    }
  }
  localLogger.info(`process end. ${workPeriodPaymentList.length - failIds.length - skipIds.length} of payments processed successfully`, 'processScheduler')
  if (!_.isEmpty(skipIds)) {
    localLogger.info(`payments: ${_.join(skipIds, ',')} are processing by other processor`, 'processScheduler')
  }
  if (!_.isEmpty(failIds)) {
    localLogger.error(`payments: ${_.join(failIds, ',')} are processed failed`, 'processScheduler')
  }
}
/**
 * Process a record of payment.
 *
 * Drives one Work Period Payment through the full challenge lifecycle
 * (create -> assign member -> activate -> close), tracking progress in a
 * PaymentScheduler row so an interrupted run can resume from the last
 * completed step. Status transitions are persisted in a DB transaction,
 * mirrored to ES via processUpdate, and published to Kafka.
 *
 * @param {Object} workPeriodPayment the work period payment
 * @returns {String} process result (one of processResult.SUCCESS/FAIL/SKIP)
 */
async function processPayment (workPeriodPayment) {
  processStatus.paymentStartTime = Date.now()
  let paymentScheduler
  if (workPeriodPayment.status === 'in-progress') {
    paymentScheduler = await PaymentScheduler.findOne({ where: { workPeriodPaymentId: workPeriodPayment.id, status: 'in-progress' } })
    // If the in-progress record has not expired, it is considered to be being processed by other processes
    if (paymentScheduler && moment(paymentScheduler.updatedAt).add(moment.duration(IN_PROGRESS_EXPIRED)).isAfter(moment())) {
      localLogger.info(`workPeriodPayment: ${workPeriodPayment.id} is being processed by other processor`, 'processPayment')
      return processResult.SKIP
    }
  } else {
    // Claim the payment: move it to "in-progress" before doing any work.
    const oldValue = workPeriodPayment.toJSON()
    let entity
    let key
    try {
      await sequelize.transaction(async (t) => {
        const updated = await workPeriodPayment.update({ status: 'in-progress' }, { transaction: t })
        key = `workPeriodPayment.billingAccountId:${updated.billingAccountId}`
        entity = updated.toJSON()
        await processUpdate({ ...entity, key })
      })
    } catch (e) {
      // ES sync failed after the row was updated: emit to the error topic for reconciliation.
      if (entity) {
        postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiodpayment.update')
      }
      throw e
    }
    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, entity, { oldValue: oldValue, key })
  }
  // Check whether the number of processed records per minute exceeds the specified number, if it exceeds, wait for the next minute before processing
  await checkWait(PaymentSchedulerStatus.START_PROCESS)
  localLogger.info(`Processing workPeriodPayment ${workPeriodPayment.id}`, 'processPayment')

  const workPeriod = await WorkPeriod.findById(workPeriodPayment.workPeriodId)
  try {
    if (!paymentScheduler) {
      // 1. create challenge
      const challengeId = await withRetry(createChallenge, [getCreateChallengeParam(workPeriod, workPeriodPayment)], validateError, PaymentSchedulerStatus.CREATE_CHALLENGE)
      paymentScheduler = await PaymentScheduler.create({ challengeId, step: PaymentSchedulerStatus.CREATE_CHALLENGE, workPeriodPaymentId: workPeriodPayment.id, userHandle: workPeriod.userHandle, status: 'in-progress' })
    } else {
      // If the paymentScheduler already exists, it means that this is a record caused by an abnormal shutdown
      await setPaymentSchedulerStep(paymentScheduler)
    }
    // Start from unprocessed step, perform the process step by step
    while (paymentScheduler.step !== PaymentSchedulerStatus.CLOSE_CHALLENGE) {
      await processStep(paymentScheduler)
    }

    const oldValue = workPeriodPayment.toJSON()
    let key
    let entity
    try {
      await sequelize.transaction(async (t) => {
        // 5. update wp and save it should only update already existent Work Period Payment record with created "challengeId" and "status=completed".
        const updated = await workPeriodPayment.update({ challengeId: paymentScheduler.challengeId, status: 'completed' }, { transaction: t })
        entity = updated.toJSON()
        // NOTE(review): `key` is never assigned in this success branch, so
        // processUpdate/postEvent receive key=undefined here — confirm whether
        // the billingAccountId key (as used in the other branches) was intended.
        await processUpdate({ ...entity, key })
      })
    } catch (e) {
      if (entity) {
        postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiodpayment.update')
      }
      throw e
    }
    // Update the modified status to es
    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, entity, { oldValue: oldValue, key })
    await paymentScheduler.update({ step: PaymentSchedulerStatus.CLOSE_CHALLENGE, userId: paymentScheduler.userId, status: 'completed' })
    localLogger.info(`Processed workPeriodPayment ${workPeriodPayment.id} successfully`, 'processPayment')
    return processResult.SUCCESS
  } catch (err) {
    logger.logFullError(err, { component: 'PaymentSchedulerService', context: 'processPayment' })
    // Record where and why the payment failed so it can be inspected/retried.
    const statusDetails = { errorMessage: extractErrorMessage(err), errorCode: _.get(err, 'status', -1), retry: _.get(err, 'retry', -1), step: _.get(err, 'step'), challengeId: paymentScheduler ? paymentScheduler.challengeId : null }
    const oldValue = workPeriodPayment.toJSON()
    let entity
    let key
    try {
      await sequelize.transaction(async (t) => {
        // If payment processing failed Work Periods Payment "status" should be changed to "failed" and populate "statusDetails" field with error details in JSON format.
        const updated = await workPeriodPayment.update({ statusDetails, status: 'failed' }, { transaction: t })
        key = `workPeriodPayment.billingAccountId:${updated.billingAccountId}`
        entity = updated.toJSON()
        await processUpdate({ ...entity, key })
      })
    } catch (e) {
      if (entity) {
        postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiodpayment.update')
      }
      throw e
    }
    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, entity, { oldValue: oldValue, key })
    if (paymentScheduler) {
      await paymentScheduler.update({ step: _.get(err, 'step'), userId: paymentScheduler.userId, status: 'failed' })
    }
    localLogger.error(`Processed workPeriodPayment ${workPeriodPayment.id} failed`, 'processPayment')
    return processResult.FAIL
  }
}
/**
 * Perform a specific step in the process.
 *
 * Executes the NEXT step after the one recorded on the scheduler, then
 * advances `paymentScheduler.step` in memory (the caller persists it).
 * Called in a loop until the step reaches CLOSE_CHALLENGE.
 *
 * @param {Object} paymentScheduler the payment scheduler
 */
async function processStep (paymentScheduler) {
  if (paymentScheduler.step === PaymentSchedulerStatus.CREATE_CHALLENGE) {
    // 2. assign member to the challenge
    await withRetry(addResourceToChallenge, [paymentScheduler.challengeId, paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.ASSIGN_MEMBER)
    paymentScheduler.step = PaymentSchedulerStatus.ASSIGN_MEMBER
  } else if (paymentScheduler.step === PaymentSchedulerStatus.ASSIGN_MEMBER) {
    // 3. active the challenge
    await withRetry(activateChallenge, [paymentScheduler.challengeId], validateError, PaymentSchedulerStatus.ACTIVATE_CHALLENGE)
    paymentScheduler.step = PaymentSchedulerStatus.ACTIVATE_CHALLENGE
  } else if (paymentScheduler.step === PaymentSchedulerStatus.ACTIVATE_CHALLENGE) {
    // 4.1. get user id
    const { userId } = await withRetry(getMemberDetailsByHandle, [paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.GET_USER_ID)
    paymentScheduler.userId = userId
    paymentScheduler.step = PaymentSchedulerStatus.GET_USER_ID
  } else if (paymentScheduler.step === PaymentSchedulerStatus.GET_USER_ID) {
    // 4.2. close the challenge
    await withRetry(closeChallenge, [paymentScheduler.challengeId, paymentScheduler.userId, paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.CLOSE_CHALLENGE)
    paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE
  }
}
/**
 * Set the scheduler actual step.
 *
 * Used when resuming after an abnormal shutdown: queries the Challenge API
 * to infer how far the previous run got, and records that step so the
 * processing loop restarts at the right place.
 *
 * @param {Object} paymentScheduler the scheduler object
 */
async function setPaymentSchedulerStep (paymentScheduler) {
  const challenge = await getChallenge(paymentScheduler.challengeId)
  // With processing switched OFF, everything is mocked, so treat as done.
  if (SWITCH === PaymentProcessingSwitch.OFF) {
    paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE
  } else if (challenge.status === ChallengeStatus.COMPLETED) {
    paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE
  } else if (challenge.status === ChallengeStatus.ACTIVE) {
    paymentScheduler.step = PaymentSchedulerStatus.ACTIVATE_CHALLENGE
  } else {
    // Challenge exists but is not active: check whether the member was assigned.
    const resource = await getChallengeResource(paymentScheduler.challengeId, paymentScheduler.userHandle, config.ROLE_ID_SUBMITTER)
    if (resource) {
      paymentScheduler.step = PaymentSchedulerStatus.ASSIGN_MEMBER
    } else {
      paymentScheduler.step = PaymentSchedulerStatus.CREATE_CHALLENGE
    }
  }
  // The main purpose is updating the updatedAt of payment scheduler to avoid simultaneous processing
  await paymentScheduler.update({ step: paymentScheduler.step })
}
/**
 * Generate the create challenge parameter.
 *
 * The challenge name and description are intentionally identical; the
 * week-ending date is formatted once so both strings are guaranteed to
 * agree (the original computed the same moment format twice).
 *
 * @param {Object} workPeriod the work period
 * @param {Object} workPeriodPayment the work period payment
 * @returns {Object} the create challenge parameter
 */
function getCreateChallengeParam (workPeriod, workPeriodPayment) {
  // Format the period end date once and reuse it for name and description.
  const weekEnding = moment(workPeriod.endDate).format('D/M/YYYY')
  const title = `TaaS Payment - ${workPeriod.userHandle} - Week Ending ${weekEnding}`
  return {
    projectId: workPeriod.projectId,
    userHandle: workPeriod.userHandle,
    amount: workPeriodPayment.amount,
    name: title,
    description: title,
    billingAccountId: workPeriodPayment.billingAccountId
  }
}
/**
 * Before each step is processed, wait for the corresponding time.
 *
 * Two kinds of waiting are combined here:
 *  - fixed inter-step delays (FIX_DELAY_STEP_*) applied only on the first
 *    attempt (tryCount === 0), i.e. when arriving from the previous step;
 *  - per-minute throttling via checkPerMinThreshold;
 *  - exponential backoff between retries (tryCount > 0), reduced by the
 *    time already spent waiting on the throttle (`lapse`).
 *
 * @param {String} step the step name
 * @param {Number} tryCount the try count (0 on the first attempt)
 */
async function checkWait (step, tryCount) {
  // When calculating the retry time later, we need to subtract the time that has been waited before
  let lapse = 0
  if (step === PaymentSchedulerStatus.START_PROCESS) {
    lapse += await checkPerMinThreshold('paymentsProcessed')
  } else if (step === PaymentSchedulerStatus.CREATE_CHALLENGE) {
    // NOTE(review): unlike the other branches, this wait is not added to
    // `lapse`, so a throttle sleep here does not shorten the retry backoff —
    // confirm whether this asymmetry is intentional.
    await checkPerMinThreshold('challengeRequested')
  } else if (step === PaymentSchedulerStatus.ASSIGN_MEMBER) {
    // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time
    if (FIX_DELAY_STEP_CREATE_CHALLENGE > 0 && tryCount === 0) {
      await sleep(FIX_DELAY_STEP_CREATE_CHALLENGE)
    }
    lapse += await checkPerMinThreshold('resourceRequested')
  } else if (step === PaymentSchedulerStatus.ACTIVATE_CHALLENGE) {
    // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time
    if (FIX_DELAY_STEP_ASSIGN_MEMBER > 0 && tryCount === 0) {
      await sleep(FIX_DELAY_STEP_ASSIGN_MEMBER)
    }
    lapse += await checkPerMinThreshold('challengeRequested')
  } else if (step === PaymentSchedulerStatus.CLOSE_CHALLENGE) {
    // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time
    if (FIX_DELAY_STEP_ACTIVATE_CHALLENGE > 0 && tryCount === 0) {
      await sleep(FIX_DELAY_STEP_ACTIVATE_CHALLENGE)
    }
    lapse += await checkPerMinThreshold('challengeRequested')
  }

  if (tryCount > 0) {
    // exponential backoff and do not exceed the maximum retry delay
    const retryDelay = Math.min(RETRY_BASE_DELAY * Math.pow(2, tryCount), RETRY_MAX_DELAY)
    await sleep(retryDelay - lapse)
  }
}
/**
 * Determine whether the number of records processed every minute exceeds
 * the specified number, if it exceeds, wait for the next minute.
 *
 * Tracks counts in the module-level `processStatus.perMin`, keyed by the
 * current minute; the counter for `key` is always incremented on exit.
 * NOTE(review): the minute key uses format 'h:m' (12-hour clock, no date),
 * so two instants exactly 12 hours apart share a key — presumably harmless
 * for a continuously running scheduler, but confirm.
 *
 * @param {String} key the min threshold key (a counter name in perMinThreshold)
 * @returns {Number} wait time in milliseconds (0 when no wait was needed)
 */
async function checkPerMinThreshold (key) {
  const mt = moment()
  const min = mt.format('h:m')
  let waitMs = 0
  if (processStatus.perMin.minute === min) {
    // Still inside the same minute: enforce the ceiling for this counter.
    if (processStatus.perMin[key] >= processStatus.perMinThreshold[key]) {
      waitMs = (60 - mt.seconds()) * 1000
      localLogger.info(`The number of records of ${key} processed per minute reaches ${processStatus.perMinThreshold[key]}, and it need to wait for ${60 - mt.seconds()} seconds until the next minute`)
      await sleep(waitMs)
      // A new minute has started after the sleep: reset all counters.
      processStatus.perMin = {
        minute: moment().format('h:m'),
        paymentsProcessed: 0,
        challengeRequested: 0,
        resourceRequested: 0
      }
    }
  } else {
    // First call in a new minute: start fresh counters under the new key.
    processStatus.perMin = {
      minute: min,
      paymentsProcessed: 0,
      challengeRequested: 0,
      resourceRequested: 0
    }
  }
  processStatus.perMin[key]++
  return waitMs
}
/**
 * Determine whether it can try again
 * @param {Object} err the process error
 * @returns {Boolean} true when the error is retryable
 */
function validateError (err) {
  // Retryable errors are exactly the HTTP 5xx server errors.
  const { status } = err
  const isServerError = status >= 500 && status < 600
  return isServerError
}
/**
 * Execute the function, if an exception occurs, retry according to the conditions
 * @param {Function} func the main function
 * @param {Array} argArr the args of main function
 * @param {Function} predictFunc the determine error function (returns true when the error is retryable)
 * @param {String} step the step name
 * @returns the result of main function
 * @throws the last error (decorated with `retry` count and `step`) when retrying is abandoned
 */
async function withRetry (func, argArr, predictFunc, step) {
  let tryCount = 0
  processStatus.requestStartTime = Date.now()
  while (true) {
    // wait for rate limits / fixed step delays / exponential backoff before each attempt
    await checkWait(step, tryCount)
    tryCount++
    try {
      // mock code
      if (SWITCH === PaymentProcessingSwitch.OFF) {
        // without actual API calls by adding delay (for example 1 second for each step), to simulate the act
        // BUGFIX: the sleep promise was previously not awaited, so no delay actually happened
        await sleep(1000)
        if (step === PaymentSchedulerStatus.CREATE_CHALLENGE) {
          return '00000000-0000-0000-0000-000000000000'
        } else if (step === PaymentSchedulerStatus.GET_USER_ID) {
          return { userId: 100001 }
        }
        return
      } else {
        // Execute the main function
        const result = await func(...argArr)
        return result
      }
    } catch (err) {
      const now = Date.now()
      // The following is the case of not retrying:
      // 1. The number of retries exceeds the configured number
      // 2. The thrown error does not match the retry conditions
      // 3. The request execution time exceeds the configured time
      // 4. The processing time of the payment record exceeds the configured time
      if (tryCount > MAX_RETRY_COUNT || !predictFunc(err) || now - processStatus.requestStartTime > PER_REQUEST_MAX_TIME || now - processStatus.paymentStartTime > PER_PAYMENT_MAX_TIME) {
        err.retry = tryCount
        err.step = step
        throw err
      }
      localLogger.info(`execute ${step} with error: ${err.message}, retry...`, 'withRetry')
    }
  }
}
/**
 * Extract error message from throwed error object
 * @param {object} err the error object
 * @returns {String} the error message
 */
function extractErrorMessage (err) {
  // Candidate locations of a human-readable message, most specific first;
  // the first truthy value wins, exactly like the original `||` chain.
  const candidatePaths = [
    'response.body.result.content.message',
    'response.body.message',
    'message',
    'response.res.statusMessage'
  ]
  for (const path of candidatePaths) {
    const message = _.get(err, path)
    if (message) {
      return message
    }
  }
  // last resort: the error's own string representation
  return err.toString()
}
// Public API of the payment scheduler module
module.exports = {
  processScheduler
}
<file_sep>version: "3"
services:
postgres:
container_name: tc-taas-postgres
image: postgres:11.8
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: <PASSWORD>
ports:
- 5432:5432
zookeeper:
image: wurstmeister/zookeeper
container_name: tc-taas-zookeeper
ports:
- 2181:2181
kafka:
image: wurstmeister/kafka
container_name: tc-taas-kafka
depends_on:
- zookeeper
ports:
- 9092:9092
environment:
KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9093,OUTSIDE://localhost:9092
KAFKA_LISTENERS: INSIDE://kafka:9093,OUTSIDE://0.0.0.0:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
kafka-client:
container_name: tc-kafka-client
build: ./kafka-client
depends_on:
- kafka
- zookeeper
elasticsearch:
image: elasticsearch:7.7.1
container_name: tc-taas-elasticsearch
environment:
- discovery.type=single-node
ports:
- 9200:9200
taas-es-processor:
container_name: tc-taas-es-processor
build:
context: ./generic-tc-service
args:
NODE_VERSION: 12.16.3
GIT_URL: https://github.com/topcoder-platform/taas-es-processor
GIT_BRANCH: dev
command: start kafka-client
ports:
- 5001:5001
depends_on:
- kafka-client
- elasticsearch
environment:
- KAFKA_URL=kafka:9093
- ES_HOST=http://elasticsearch:9200
- BUSAPI_URL=http://tc-bus-api:8002/v5
- AUTH0_URL=${AUTH0_URL}
- AUTH0_AUDIENCE=${AUTH0_AUDIENCE}
- AUTH0_CLIENT_ID=${AUTH0_CLIENT_ID}
- AUTH0_CLIENT_SECRET=${AUTH0_CLIENT_SECRET}
tc-bus-api:
container_name: tc-bus-api
build:
context: ./generic-tc-service
args:
NODE_VERSION: 8.11.3
GIT_URL: https://github.com/topcoder-platform/tc-bus-api
GIT_BRANCH: dev
BYPASS_TOKEN_VALIDATION: 1
command: start kafka-client
ports:
- 8002:8002
depends_on:
- kafka-client
environment:
- PORT=8002
- KAFKA_URL=kafka:9093
- JWT_TOKEN_SECRET=<PASSWORD>
- VALID_ISSUERS="[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\",\"https:\/\/topcoder-dev.auth0.com\/\"]"
- AUTH0_URL
- AUTH0_AUDIENCE
- AUTH0_CLIENT_ID
- AUTH0_CLIENT_SECRET
- AUTH0_PROXY_SERVER_URL<file_sep>/**
* Interview Processor
*/
const _ = require('lodash')
const helper = require('../common/helper')
const config = require('config')
const esClient = helper.getESClient()
/**
 * Updates jobCandidate via a painless script
 *
 * @param {String} jobCandidateId job candidate id
 * @param {String} script script definition
 */
async function updateJobCandidateViaScript (jobCandidateId, script) {
  // Apply the script to the job candidate document; `refresh: 'wait_for'`
  // blocks until the change is visible to search before resolving.
  const updateRequest = {
    index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'),
    id: jobCandidateId,
    body: { script },
    refresh: 'wait_for'
  }
  await esClient.update(updateRequest)
}
/**
 * Process request interview entity.
 * Creates an interview record under jobCandidate.
 *
 * @param {Object} interview interview object
 */
async function processRequestInterview (interview) {
  // add interview in collection if there's already an existing collection
  // or initiate a new one with this interview
  // (painless ternary: append when "interviews" exists, otherwise create the array)
  const script = {
    source: `
      ctx._source.containsKey("interviews")
        ? ctx._source.interviews.add(params.interview)
        : ctx._source.interviews = [params.interview]
    `,
    params: { interview }
  }
  // run the script against the jobCandidate document owning this interview
  await updateJobCandidateViaScript(interview.jobCandidateId, script)
}
/**
 * Process update interview entity
 * Updates the interview record under jobCandidate.
 *
 * @param {Object} interview interview object
 */
async function processUpdateInterview (interview) {
  // if there's an interview with this id,
  // update it
  // (painless: locate the interview by id inside the "interviews" array and
  // copy every property of the payload over it; a no-op when the array or
  // the matching interview does not exist)
  const script = {
    source: `
      if (ctx._source.containsKey("interviews")) {
        def target = ctx._source.interviews.find(i -> i.id == params.interview.id);
        if (target != null) {
          for (prop in params.interview.entrySet()) {
            target[prop.getKey()] = prop.getValue()
          }
        }
      }
    `,
    params: { interview }
  }
  // run the script against the jobCandidate document owning this interview
  await updateJobCandidateViaScript(interview.jobCandidateId, script)
}
/**
 * Process bulk (partially) update interviews entity.
 * Currently supports status, updatedAt and updatedBy fields.
 * Update Joi schema to allow more fields.
 * (implementation should already handle new fields - just updating Joi schema should be enough)
 *
 * payload format:
 * {
 *   "jobCandidateId": {
 *     "interviewId": { ...fields },
 *     "interviewId2": { ...fields },
 *     ...
 *   },
 *   "jobCandidateId2": { // like above... },
 *   ...
 * }
 *
 * @param {Object} jobCandidates job candidates
 */
async function processBulkUpdateInterviews (jobCandidates) {
  // script to update & params
  // (painless: for the matched jobCandidate document, look up its entry in the
  // payload by document id, then patch each listed interview field by field)
  // NOTE(review): unlike processUpdateInterview, this script does not guard
  // ctx._source.interviews with containsKey, so a jobCandidate document
  // without an "interviews" field would make the script fail — confirm all
  // targeted documents are guaranteed to have interviews.
  const script = {
    source: `
      def completedInterviews = params.jobCandidates[ctx._id];
      for (interview in completedInterviews.entrySet()) {
        def interviewId = interview.getKey();
        def affectedFields = interview.getValue();
        def target = ctx._source.interviews.find(i -> i.id == interviewId);
        if (target != null) {
          for (field in affectedFields.entrySet()) {
            target[field.getKey()] = field.getValue();
          }
        }
      }
    `,
    params: { jobCandidates }
  }
  // update interviews
  // (updateByQuery selects exactly the jobCandidate documents whose ids are
  // keys of the payload; refresh:true makes the changes searchable immediately)
  await esClient.updateByQuery({
    index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'),
    body: {
      script,
      query: {
        ids: {
          values: _.keys(jobCandidates)
        }
      }
    },
    refresh: true
  })
}
// Public API of the interview processor
module.exports = {
  processRequestInterview,
  processUpdateInterview,
  processBulkUpdateInterviews
}
<file_sep>/* eslint-disable no-unused-expressions */
const expect = require('chai').expect
const sinon = require('sinon')
const models = require('../../src/models')
const service = require('../../src/services/ResourceBookingService')
const workPeriodService = require('../../src/services/WorkPeriodService')
const commonData = require('./common/CommonData')
const testData = require('./common/ResourceBookingData')
const helper = require('../../src/common/helper')
const errors = require('../../src/common/errors')
const _ = require('lodash')
const busApiClient = helper.getBusApiClient()
const ResourceBooking = models.ResourceBooking
const WorkPeriod = models.WorkPeriod
describe('resourceBooking service test', () => {
let stubPostEvent
let stubCreateWorkPeriodService
let stubUpdateWorkPeriodService
let stubDeleteWorkPeriodService
beforeEach(() => {
stubPostEvent = sinon.stub(busApiClient, 'postEvent').callsFake(async () => undefined)
stubCreateWorkPeriodService = sinon.stub(workPeriodService, 'createWorkPeriod').callsFake(async () => undefined)
stubUpdateWorkPeriodService = sinon.stub(workPeriodService, 'partiallyUpdateWorkPeriod').callsFake(async () => undefined)
stubDeleteWorkPeriodService = sinon.stub(workPeriodService, 'deleteWorkPeriod').callsFake(async () => undefined)
})
afterEach(() => {
sinon.restore()
})
describe('Create resource booking successfully', () => {
let stubEnsureJobById
let stubEnsureUserById
beforeEach(() => {
stubEnsureJobById = sinon.stub(helper, 'ensureJobById').callsFake(async () => undefined)
stubEnsureUserById = sinon.stub(helper, 'ensureUserById').callsFake(async () => commonData.UserTCConnCopilot)
})
it('T01:Create resource booking start Saturday end Sunday', async () => {
const data = testData.T01
const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(() => {
return data.resourceBooking.response
})
const entity = await service.createResourceBooking(commonData.currentUser, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubEnsureJobById.calledOnce).to.be.true
expect(stubEnsureUserById.calledOnce).to.be.true
expect(stubDBCreate.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(6)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubCreateWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0])
expect(stubCreateWorkPeriodService.getCall(1).args[0]).to.deep.eq(data.workPeriod.request[1])
expect(stubCreateWorkPeriodService.getCall(2).args[0]).to.deep.eq(data.workPeriod.request[2])
expect(stubCreateWorkPeriodService.getCall(3).args[0]).to.deep.eq(data.workPeriod.request[3])
expect(stubCreateWorkPeriodService.getCall(4).args[0]).to.deep.eq(data.workPeriod.request[4])
})
it('T02:Create resource booking start Sunday end Saturday', async () => {
const data = testData.T02
const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(async () => {
return data.resourceBooking.response
})
const entity = await service.createResourceBooking(commonData.currentUser, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubEnsureJobById.calledOnce).to.be.true
expect(stubEnsureUserById.calledOnce).to.be.true
expect(stubDBCreate.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(1)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubCreateWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0])
})
it('T03:Create resource booking without startDate', async () => {
const data = testData.T03
const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(async () => {
return data.resourceBooking.response
})
const entity = await service.createResourceBooking(commonData.currentUser, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubEnsureJobById.calledOnce).to.be.true
expect(stubEnsureUserById.calledOnce).to.be.true
expect(stubDBCreate.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
it('T04:Create resource booking without endDate', async () => {
const data = testData.T04
const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(async () => {
return data.resourceBooking.response
})
const entity = await service.createResourceBooking(commonData.currentUser, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubEnsureJobById.calledOnce).to.be.true
expect(stubEnsureUserById.calledOnce).to.be.true
expect(stubDBCreate.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
})
describe('Create resource booking unsuccessfully', () => {
let stubEnsureJobById
let stubEnsureUserById
beforeEach(() => {
stubEnsureJobById = sinon.stub(helper, 'ensureJobById').callsFake(async () => undefined)
stubEnsureUserById = sinon.stub(helper, 'ensureUserById').callsFake(async () => commonData.UserTCConnCopilot)
})
it('T05:Fail to create resource booking with startDate greater then endDate', async () => {
const data = testData.T05
const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(() => {
return data.resourceBooking.response
})
let error
try {
await service.createResourceBooking(commonData.currentUser, data.resourceBooking.request)
} catch (err) {
error = err
}
expect(error.message).to.eq(data.error.message)
expect(stubEnsureJobById.notCalled).to.be.true
expect(stubEnsureUserById.notCalled).to.be.true
expect(stubDBCreate.notCalled).to.be.true
expect(stubPostEvent.notCalled).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
})
describe('Update resource booking successfully', () => {
it('T06:Update resource booking dates and do not cause work period create/delete', async () => {
const data = testData.T06
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[0].data)
})
it('T07:Update resource booking dates and cause work period creation - 1', async () => {
const data = testData.T07
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(1)
expect(stubUpdateWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubCreateWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].data)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[1].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[1].data)
})
it('T08:Update resource booking dates and cause work period creation - 2', async () => {
const data = testData.T08
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(1)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubCreateWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].data)
})
it('T09:Update resource booking startDate and cause work period to be deleted', async () => {
const data = testData.T09
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[1].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[1].data)
})
it('T10:Update resource booking endDate and cause work period to be deleted', async () => {
const data = testData.T10
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[1].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[1].data)
})
it('T11:Update resource booking dates and cause work period daysWorked to change', async () => {
const data = testData.T11
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(2)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[0].data)
expect(stubUpdateWorkPeriodService.getCall(1).args[1]).to.deep.eq(data.workPeriod.request[1].id)
expect(stubUpdateWorkPeriodService.getCall(1).args[2]).to.deep.eq(data.workPeriod.request[1].data)
})
it('T12:Update resource booking dates and cause delete, update, create work period operations', async () => {
const data = testData.T12
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(1)
expect(stubUpdateWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.callCount).to.eq(1)
expect(stubDeleteWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[1]).to.deep.eq(data.workPeriod.request[1].id)
expect(stubUpdateWorkPeriodService.getCall(0).args[2]).to.deep.eq(data.workPeriod.request[1].data)
expect(stubCreateWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[2].data)
})
})
describe('Update resource booking unsuccessfully', () => {
it('T13:Fail to update resource booking status to cancelled', async () => {
const data = testData.T13
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
let error
try {
await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.notCalled).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
it('T14:Fail to update resource booking dates', async () => {
const data = testData.T14
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
let error
try {
await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.notCalled).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
})
describe('Delete resource booking successfully', () => {
it('T15:Delete resource booking and cause work periods to be deleted ', async () => {
const data = testData.T15
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
await service.deleteResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id)
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(stubPostEvent.calledOnce).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(2)
expect(stubDeleteWorkPeriodService.getCall(0).args[0]).to.deep.eq(data.workPeriod.request[0].id)
expect(stubDeleteWorkPeriodService.getCall(1).args[0]).to.deep.eq(data.workPeriod.request[1].id)
})
})
describe('Delete resource booking unsuccessfully', () => {
it('T16:Fail to delete resource booking with paid work periods', async () => {
const data = testData.T16
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async (criteria) => {
if (criteria.raw) {
return _.map(data.workPeriod.response, wp => wp.toJSON())
}
return data.workPeriod.response
})
let error
try {
await service.deleteResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
expect(stubResourceBookingFindById.notCalled).to.be.true
expect(stubPostEvent.notCalled).to.be.true
expect(stubWorkPeriodFindAll.called).to.be.true
expect(stubCreateWorkPeriodService.callCount).to.eq(0)
expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
})
})
describe('Get resource booking with/without nested fields', () => {
it('T17:Get resource booking from ES', async () => {
const data = testData.T17
const ESClient = commonData.ESClient
ESClient.get = () => {}
const esClientGet = sinon.stub(ESClient, 'get').callsFake(() => data.esClientGet)
const result = await service.getResourceBooking(commonData.userWithManagePermission, data.esClientGet.body._source.id, data.criteria)
expect(esClientGet.calledOnce).to.be.true
expect(result).to.deep.eq(data.esClientGet.body._source)
})
it('T18:Get resource booking from DB', async () => {
const data = testData.T18
const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
return data.resourceBooking.value
})
const result = await service.getResourceBooking(commonData.userWithManagePermission, data.resourceBooking.value.dataValues.id, data.criteria)
expect(stubResourceBookingFindById.calledOnce).to.be.true
expect(result).to.deep.eq(data.resourceBooking.value.dataValues)
})
it('T19:Fail to get resource booking with not allowed fields', async () => {
const data = testData.T19
let error
try {
await service.getResourceBooking(commonData.userWithManagePermission, data.id, data.criteria)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
})
it('T20:Fail to get resource booking with memberRate', async () => {
const data = testData.T20
let error
try {
await service.getResourceBooking(commonData.regularUser, data.id, data.criteria)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
})
it('T21:Fail to get resource booking with nested workPeriods', async () => {
const data = testData.T21
let error
try {
await service.getResourceBooking(commonData.currentUser, data.id, data.criteria)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
})
it('T22:Fail to get resource booking without including projectId as a regularUser', async () => {
const data = testData.T22
let error
try {
await service.getResourceBooking(commonData.regularUser, data.id, data.criteria)
} catch (err) {
error = err
}
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
})
it('T23:Fail to get resource booking as a regularUser who is not member of project', async () => {
const data = testData.T23
const ESClient = commonData.ESClient
ESClient.get = () => {}
const esClientGet = sinon.stub(ESClient, 'get').callsFake(() => data.esClientGet)
const checkIsMemberOfProject = sinon.stub(helper, 'checkIsMemberOfProject').callsFake(() => {
throw new errors.UnauthorizedError(data.error.message)
})
let error
try {
await service.getResourceBooking(commonData.regularUser, data.id, data.criteria)
} catch (err) {
error = err
}
expect(esClientGet.calledOnce).to.be.true
expect(checkIsMemberOfProject.calledOnce).to.be.true
expect(error.httpStatus).to.eq(data.error.httpStatus)
expect(error.message).to.eq(data.error.message)
})
})
// Tests for searchResourceBookings(): ES happy path, DB fallback, and the
// various criteria/permission validation failures (T24-T35).
describe('Search resource booking with/without nested fields', () => {
  // T24: ES answers, so the DB is never consulted.
  it('T24:Search resource booking from ES', async () => {
    const data = testData.T24
    const ESClient = commonData.ESClient
    ESClient.search = () => {}
    const esClientSearch = sinon.stub(ESClient, 'search').callsFake(() => data.esClientSearch)
    const result = await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    expect(esClientSearch.calledOnce).to.be.true
    expect(result).to.deep.eq(data.result)
  })
  // T25: ES throws, so the service must fall back to the database
  // (both findAll and count are expected to run).
  it('T25:Search resource booking from DB', async () => {
    const data = testData.T25
    const ESClient = commonData.ESClient
    ESClient.search = () => {}
    const esClientSearch = sinon.stub(ESClient, 'search').callsFake(() => { throw new Error() })
    const stubResourceBookingFindAll = sinon.stub(ResourceBooking, 'findAll').callsFake(async () => {
      return data.resourceBookingFindAll
    })
    const stubResourceBookingCount = sinon.stub(ResourceBooking, 'count').callsFake(async () => {
      return data.resourceBookingCount
    })
    const result = await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    expect(esClientSearch.calledOnce).to.be.true
    expect(stubResourceBookingFindAll.calledOnce).to.be.true
    expect(stubResourceBookingCount.calledOnce).to.be.true
    expect(result).to.deep.eq(data.result)
  })
  // T26: fields criteria containing unknown field names must be rejected.
  it('T26:Fail to search resource booking with not allowed fields', async () => {
    const data = testData.T26
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  // T27: regular users may not request the memberRate field.
  it('T27:Fail to search resource booking with memberRate', async () => {
    const data = testData.T27
    let error
    try {
      await service.searchResourceBookings(commonData.regularUser, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  // T28: requesting nested workPeriods is rejected for this caller.
  it('T28:Fail to search resource booking with nested workPeriods', async () => {
    const data = testData.T28
    let error
    try {
      await service.searchResourceBookings(commonData.currentUser, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  // T29: regular users must filter by projectId.
  it('T29:Fail to search resource booking without filtering by projectId as a regularUser', async () => {
    const data = testData.T29
    let error
    try {
      await service.searchResourceBookings(commonData.regularUser, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  // T30: membership check failure must propagate as UnauthorizedError.
  it('T30:Fail to search resource booking as a regularUser who is not member of project', async () => {
    const data = testData.T30
    const checkIsMemberOfProject = sinon.stub(helper, 'checkIsMemberOfProject').callsFake(() => {
      throw new errors.UnauthorizedError(data.error.message)
    })
    let error
    try {
      await service.searchResourceBookings(commonData.regularUser, data.criteria)
    } catch (err) {
      error = err
    }
    expect(checkIsMemberOfProject.calledOnce).to.be.true
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  // T31-T35: each criteria below is invalid for a different reason
  // (nested-field filter/sort, or fields not included in the projection);
  // all must fail with the error recorded in testData.
  it('T31:Fail to search resource booking with filtering by nested field', async () => {
    const data = testData.T31
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  it('T32:Fail to search resource booking with sorting by not included field', async () => {
    const data = testData.T32
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  it('T33:Fail to search resource booking with sorting by nested field', async () => {
    const data = testData.T33
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  it('T34:Fail to search resource booking with filtering by not included field', async () => {
    const data = testData.T34
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
  it('T35:Fail to search resource booking with filtering by not included nested field', async () => {
    const data = testData.T35
    let error
    try {
      await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
  })
})
// Tests for partiallyUpdateResourceBooking() when dates are being cleared:
// clearing is only allowed while at least one of the two dates is already
// null (T36-T38).
describe('Update resource booking dates to null', () => {
  // T36: both dates set -> clearing must be rejected, nothing is published
  // and no work-period service call happens.
  it('T36:Should not allow setting dates to null if both dates are not null', async () => {
    const data = testData.T36
    const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
      return data.resourceBooking.value
    })
    let error
    try {
      await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
    } catch (err) {
      error = err
    }
    expect(error.httpStatus).to.eq(data.error.httpStatus)
    expect(error.message).to.eq(data.error.message)
    expect(stubResourceBookingFindById.calledOnce).to.be.true
    expect(stubPostEvent.notCalled).to.be.true
    expect(stubCreateWorkPeriodService.callCount).to.eq(0)
    expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
    expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
  })
  // T37: one date already null -> update succeeds and an event is posted,
  // but no work periods are created/updated/deleted.
  it('T37:Should allow setting dates to null if one of the dates is null', async () => {
    const data = testData.T37
    const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
      return data.resourceBooking.value
    })
    const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async () => {
      return data.workPeriod.response
    })
    const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
    expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
    expect(stubResourceBookingFindById.calledOnce).to.be.true
    expect(stubPostEvent.calledOnce).to.be.true
    expect(stubWorkPeriodFindAll.called).to.be.true
    expect(stubCreateWorkPeriodService.callCount).to.eq(0)
    expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
    expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
  })
  // T38: both dates already null -> same success path as T37.
  it('T38:Should allow setting dates to null if both dates are null', async () => {
    const data = testData.T38
    const stubResourceBookingFindById = sinon.stub(ResourceBooking, 'findById').callsFake(async () => {
      return data.resourceBooking.value
    })
    const stubWorkPeriodFindAll = sinon.stub(WorkPeriod, 'findAll').callsFake(async () => {
      return data.workPeriod.response
    })
    const entity = await service.partiallyUpdateResourceBooking(commonData.currentUser, data.resourceBooking.value.dataValues.id, data.resourceBooking.request)
    expect(entity).to.deep.eql(data.resourceBooking.response.toJSON())
    expect(stubResourceBookingFindById.calledOnce).to.be.true
    expect(stubPostEvent.calledOnce).to.be.true
    expect(stubWorkPeriodFindAll.called).to.be.true
    expect(stubCreateWorkPeriodService.callCount).to.eq(0)
    expect(stubUpdateWorkPeriodService.callCount).to.eq(0)
    expect(stubDeleteWorkPeriodService.callCount).to.eq(0)
  })
})
})
<file_sep>/**
* Contains healthcheck routes
*/
// Route table: maps URL path -> HTTP method -> controller/method pair.
// No `auth` key, so the health endpoint is public.
module.exports = {
  '/health': {
    get: {
      controller: 'HealthCheckController',
      method: 'checkHealth'
    }
  }
}
<file_sep>/**
* Contains taas team routes
*/
const constants = require('../../app-constants')
module.exports = {
'/taas-teams': {
get: {
controller: 'TeamController',
method: 'searchTeams',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/email': {
post: {
controller: 'TeamController',
method: 'sendEmail',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/skills': {
get: {
controller: 'TeamController',
method: 'searchSkills'
}
},
'/taas-teams/me': {
get: {
controller: 'TeamController',
method: 'getMe',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/getSkillsByJobDescription': {
post: {
controller: 'TeamController',
method: 'getSkillsByJobDescription'
}
},
'/taas-teams/:id': {
get: {
controller: 'TeamController',
method: 'getTeam',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/:id/jobs/:jobId': {
get: {
controller: 'TeamController',
method: 'getTeamJob',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/:id/members': {
post: {
controller: 'TeamController',
method: 'addMembers',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
},
get: {
controller: 'TeamController',
method: 'searchMembers',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/:id/invites': {
get: {
controller: 'TeamController',
method: 'searchInvites',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/:id/members/:projectMemberId': {
delete: {
controller: 'TeamController',
method: 'deleteMember',
auth: 'jwt',
scopes: [constants.Scopes.READ_TAAS_TEAM]
}
},
'/taas-teams/sendRoleSearchRequest': {
post: {
controller: 'TeamController',
method: 'roleSearchRequest'
}
},
'/taas-teams/submitTeamRequest': {
post: {
controller: 'TeamController',
method: 'createTeam',
auth: 'jwt',
scopes: [constants.Scopes.CREATE_TAAS_TEAM]
}
},
'/taas-teams/members-suggest/:fragment': {
get: {
controller: 'TeamController',
method: 'suggestMembers',
auth: 'jwt',
scopes: []
}
},
'/taas-teams/calculateAmount': {
post: {
controller: 'TeamController',
method: 'calculateAmount',
auth: 'jwt',
scopes: [constants.Scopes.CREATE_TAAS_TEAM]
}
},
'/taas-teams/createPayment': {
post: {
controller: 'TeamController',
method: 'createPayment',
auth: 'jwt',
scopes: [constants.Scopes.CREATE_TAAS_TEAM]
}
},
'/taas-teams/isExternalMember': {
post: {
controller: 'TeamController',
method: 'isExternalMember',
auth: 'jwt',
scopes: []
}
}
}
<file_sep>/**
* Script for rendering email template
*/
const fs = require('fs')
const Handlebars = require('handlebars')
const path = require('path')
/**
 * Render a Handlebars template file with the given data.
 * @param {string} filename path of the template file
 * @param {Object} data values substituted into the template
 * @returns {string} the rendered output
 */
function render (filename, data) {
  const templateSource = fs.readFileSync(filename, 'utf8').toString()
  const compile = Handlebars.compile(templateSource)
  return compile(data)
}
// Demo payloads keyed by notification type; the key comes from argv[2]
// or falls back to a default.
const data = JSON.parse(fs.readFileSync(path.join(__dirname, '../../data/notifications-email-demo-data.json'), 'utf8'))
const key = process.argv.length >= 3 ? process.argv[2] : 'candidatesAvailableForReview'
if (!data[key]) {
  // Fixed: the old message pointed at "notifications.json", which is not
  // the file this script actually reads.
  console.error('Please provide a proper key which is present in notifications-email-demo-data.json')
  process.exit(1)
}
const outputDir = path.join(__dirname, '../../out')
const outputFile = path.join(outputDir, 'notifications-email-template-with-data.html')
const result = render(path.join(__dirname, '../../data/notifications-email-template.html'), data[key])
if (!fs.existsSync(outputDir)) {
  // recursive: also creates missing parent directories and is a no-op
  // if the directory appears between the existsSync check and here.
  fs.mkdirSync(outputDir, { recursive: true })
}
fs.writeFileSync(outputFile, result)
console.log(`Template has been rendered to: ${outputFile}`)
<file_sep>/**
* Import data from a json file into the db and index it in Elasticsearch
*/
const config = require('config')
const { Interview, WorkPeriod, WorkPeriodPayment } = require('../../src/models')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// Include nested models so job candidates are exported/imported together
// with their interviews.
const jobCandidateModelOpts = {
  modelName: 'JobCandidate',
  include: [{
    model: Interview,
    as: 'interviews'
  }]
}
// Resource bookings carry their work periods, which in turn carry payments.
const resourceBookingModelOpts = {
  modelName: 'ResourceBooking',
  include: [{
    model: WorkPeriod,
    as: 'workPeriods',
    include: [{
      model: WorkPeriodPayment,
      as: 'payments'
    }]
  }]
}
// Input file: first CLI argument, or the configured default path.
const filePath = helper.getParamFromCliArgs() || config.DEFAULT_DATA_FILE_PATH
const userPrompt = `WARNING: this would remove existing data. Are you sure you want to import data from a json file with the path ${filePath}?`
// Models to import, in order; string entries use default options.
const dataModels = ['Job', jobCandidateModelOpts, resourceBookingModelOpts, 'Role']
/**
 * Prompt the operator for confirmation, then import the data file.
 * Exits the process with 0 on success and 1 on failure.
 */
async function importData () {
  const doImport = async () => {
    try {
      await helper.importData(filePath, dataModels, logger)
      process.exit(0)
    } catch (err) {
      logger.logFullError(err, { component: 'importData' })
      process.exit(1)
    }
  }
  await helper.promptUser(userPrompt, doImport)
}
importData()
<file_sep>/*
* Handle events for ResourceBooking.
*/
const { Op } = require('sequelize')
const _ = require('lodash')
const config = require('config')
const models = require('../models')
const logger = require('../common/logger')
const helper = require('../common/helper')
const { AggregatePaymentStatus } = require('../../app-constants')
const JobService = require('../services/JobService')
const JobCandidateService = require('../services/JobCandidateService')
const WorkPeriodService = require('../services/WorkPeriodService')
const WorkPeriod = models.WorkPeriod
/**
* When ResourceBooking's status is changed to `placed`
* the corresponding JobCandidate record (with the same userId and jobId)
* should be updated with status `placed`
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function placeJobCandidate (payload) {
  const oldValue = _.get(payload, 'options.oldValue')
  // Skip when the status did not actually change.
  if (oldValue && payload.value.status === oldValue.status) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'placeJobCandidate',
      message: 'status not changed'
    })
    return
  }
  // Only `placed` bookings are interesting here.
  if (payload.value.status !== 'placed') {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'placeJobCandidate',
      message: `not interested resource booking - status: ${payload.value.status}`
    })
    return
  }
  const resourceBooking = await models.ResourceBooking.findById(payload.value.id)
  if (!resourceBooking.jobId) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'placeJobCandidate',
      message: `id: ${resourceBooking.id} resource booking without jobId - ignored`
    })
    return
  }
  // Every not-yet-placed candidate for the same user/job pair gets placed.
  const candidates = await models.JobCandidate.findAll({
    where: {
      jobId: resourceBooking.jobId,
      userId: resourceBooking.userId,
      status: {
        [Op.not]: 'placed'
      }
    }
  })
  const placeCandidate = async (candidate) => {
    const result = await JobCandidateService.partiallyUpdateJobCandidate(
      helper.getAuditM2Muser(),
      candidate.id,
      { status: 'placed' }
    )
    logger.info({
      component: 'ResourceBookingEventHandler',
      context: 'placeJobCandidate',
      message: `id: ${result.id} candidate got selected.`
    })
  }
  await Promise.all(candidates.map(placeCandidate))
}
/**
* When ResourceBooking's status is changed to `placed`
* send notifications to user
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function sendPlacedNotifications (payload) {
  // Only act on a transition INTO `placed` (skip if it was already placed).
  if (payload.value.status !== 'placed' || _.get(payload, 'options.oldValue.status') === 'placed') {
    return
  }
  const resourceBooking = await models.ResourceBooking.findById(payload.value.id)
  const template = helper.getEmailTemplatesForKey('notificationEmailTemplates')['taas.notification.resource-booking-placed']
  const project = await helper.getProjectById({ isMachine: true }, resourceBooking.projectId)
  const user = await helper.getUserById(resourceBooking.userId)
  const job = await models.Job.findById(resourceBooking.jobId)
  // All project members are notified.
  const recipients = _.map(project.members, m => _.pick(m, 'userId'))
  const jobUrl = `${config.TAAS_APP_URL}/${project.id}/positions/${job.id}`
  const teamUrl = `${config.TAAS_APP_URL}/${project.id}`
  const data = {
    subject: template.subject,
    teamName: project.name,
    teamUrl,
    jobTitle: job.title,
    jobUrl,
    userHandle: user.handle,
    startDate: resourceBooking.startDate,
    endDate: resourceBooking.endDate,
    notificationType: {
      resourceBookingPlaced: true
    },
    description: 'Resource Booking is Placed'
  }
  // The subject template may reference fields of `data` itself.
  data.subject = helper.substituteStringByObject(data.subject, data)
  const emailData = {
    serviceId: 'email',
    type: 'taas.notification.resource-booking-placed',
    details: {
      from: template.from,
      recipients,
      data,
      sendgridTemplateId: template.sendgridTemplateId,
      version: 'v3'
    }
  }
  const webData = {
    serviceId: 'web',
    type: 'taas.notification.resource-booking-placed',
    details: {
      recipients,
      contents: { teamName: project.name, projectId: project.id, userHandle: user.handle, jobTitle: job.title },
      version: 1
    }
  }
  await helper.postEvent(config.NOTIFICATIONS_CREATE_TOPIC, {
    notifications: [emailData, webData]
  })
  logger.debug({
    component: 'ResourceBookingEventHandler',
    // Fixed: was logged under the wrong context 'placeJobCandidate'
    // (copy-paste from the handler above).
    context: 'sendPlacedNotifications',
    message: `send notifications, teamName: ${project.name}, jobTitle: ${job.title}, projectId: ${project.id}, userHandle: ${user.handle}`
  })
}
/**
* Update the status of the Job to assigned when it positions requirement is fulfilled.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function assignJob (payload) {
  // Skip when the status did not actually change.
  if (_.get(payload, 'options.oldValue') && payload.value.status === payload.options.oldValue.status) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'assignJob',
      message: 'status not changed'
    })
    return
  }
  // Only `placed` bookings can complete a job's positions.
  if (payload.value.status !== 'placed') {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'assignJob',
      message: `not interested resource booking - status: ${payload.value.status}`
    })
    return
  }
  const resourceBooking = await models.ResourceBooking.findById(payload.value.id)
  if (!resourceBooking.jobId) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'assignJob',
      message: `id: ${resourceBooking.id} resource booking without jobId - ignored`
    })
    return
  }
  const job = await models.Job.findById(resourceBooking.jobId)
  // Already placed/assigned jobs need no further transition.
  if (job.status === 'placed' || job.status === 'assigned') {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'assignJob',
      message: `job with projectId ${job.projectId} is already ${job.status}`
    })
    return
  }
  // Count all placed bookings for this job to see if the headcount is met.
  const resourceBookings = await models.ResourceBooking.findAll({
    where: {
      jobId: job.id,
      status: 'placed'
    }
  })
  logger.debug({
    component: 'ResourceBookingEventHandler',
    context: 'assignJob',
    message: `the number of placed resource bookings is ${resourceBookings.length} - the numPositions of the job is ${job.numPositions}`
  })
  // Flip the job to `assigned` once every position is filled.
  if (job.numPositions === resourceBookings.length) {
    await JobService.partiallyUpdateJob(helper.getAuditM2Muser(), job.id, { status: 'assigned' })
    logger.info({ component: 'ResourceBookingEventHandler', context: 'assignJob', message: `job ${job.id} is assigned` })
  }
}
/**
* When a ResourceBooking is created, workPeriods that cover each weeks
* of resource booking should be also created
* @param {object} payload the event payload
* @returns {undefined}
*/
async function createWorkPeriods (payload) {
  const { startDate, endDate, id } = payload.value
  // Both dates are required to derive the weekly periods.
  if (_.isNil(startDate) || _.isNil(endDate)) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'createWorkPeriods',
      message: `id: ${id} resource booking without endDate or startDate - ignored`
    })
    return
  }
  // One work period per week covered by the booking.
  const workPeriodDates = helper.extractWorkPeriods(startDate, endDate)
  await _createWorkPeriods(workPeriodDates, id)
  logger.debug({
    component: 'ResourceBookingEventHandler',
    context: 'createWorkPeriods',
    message: `WorkPeriods created for resource booking with id: ${id}`
  })
}
/**
* When a ResourceBooking is updated, workPeriods related to
* that ResourceBooking should be updated also.
* This function finds out which workPeriods should be deleted,
* which ones should be created and which ones should be updated
* @param {object} payload the event payload
* @returns {undefined}
*/
async function updateWorkPeriods (payload) {
  // find related workPeriods to evaluate the changes
  const workPeriods = await WorkPeriod.findAll({
    where: {
      resourceBookingId: payload.value.id
    },
    raw: true
  })
  // gather workPeriod dates; fields absent from the partial update payload
  // fall back to the previous (oldValue) dates
  const newWorkPeriods = helper.extractWorkPeriods(
    _.isUndefined(payload.value.startDate) ? payload.options.oldValue.startDate : payload.value.startDate,
    _.isUndefined(payload.value.endDate) ? payload.options.oldValue.endDate : payload.value.endDate)
  // find which workPeriods should be removed (exist now but not in the new range)
  const workPeriodsToRemove = _.differenceBy(workPeriods, newWorkPeriods, 'startDate')
  // find which workperiods should be created (in the new range but don't exist yet)
  const workPeriodsToAdd = _.differenceBy(newWorkPeriods, workPeriods, 'startDate')
  // find which workperiods' daysWorked property should be evaluated for changes
  const intersectedWorkPeriods = _.intersectionBy(newWorkPeriods, workPeriods, 'startDate')
  let workPeriodsToUpdate = []
  if (intersectedWorkPeriods.length > 0) {
    // We only need check for first and last ones of intersected workPeriods
    // The ones at the middle won't be updated and their daysWorked value will stay the same
    if (payload.options.oldValue.startDate !== payload.value.startDate) {
      const firstWeek = _.minBy(intersectedWorkPeriods, 'startDate')
      const originalFirstWeek = _.find(workPeriods, ['startDate', firstWeek.startDate])
      const existentFirstWeek = _.minBy(workPeriods, 'startDate')
      // recalculate daysWorked for the first week of existent workPeriods and daysWorked have changed
      // (and never shrink daysWorked below what was already paid)
      if (firstWeek.startDate === existentFirstWeek.startDate && firstWeek.daysWorked !== existentFirstWeek.daysWorked &&
        existentFirstWeek.daysPaid <= firstWeek.daysWorked) {
        workPeriodsToUpdate.push(_.assign(firstWeek, { id: originalFirstWeek.id }))
        // if first of intersected workPeriods is not the first one of existent workPeriods
        // we only check if it's daysWorked exceeds the possible maximum
      } else if (originalFirstWeek.daysWorked > firstWeek.daysWorked &&
        originalFirstWeek.daysPaid <= firstWeek.daysWorked) {
        workPeriodsToUpdate.push(_.assign(firstWeek, { id: originalFirstWeek.id }))
      }
    }
    if (payload.options.oldValue.endDate !== payload.value.endDate) {
      const lastWeek = _.maxBy(intersectedWorkPeriods, 'startDate')
      const originalLastWeek = _.find(workPeriods, ['startDate', lastWeek.startDate])
      const existentLastWeek = _.maxBy(workPeriods, 'startDate')
      // recalculate daysWorked for the last week of existent workPeriods and daysWorked have changed
      if (lastWeek.startDate === existentLastWeek.startDate && lastWeek.daysWorked !== existentLastWeek.daysWorked &&
        existentLastWeek.daysPaid <= lastWeek.daysWorked) {
        workPeriodsToUpdate.push(_.assign(lastWeek, { id: originalLastWeek.id }))
        // if last of intersected workPeriods is not the last one of existent workPeriods
        // we only check if it's daysWorked exceeds the possible maximum
      } else if (originalLastWeek.daysWorked > lastWeek.daysWorked &&
        originalLastWeek.daysPaid <= lastWeek.daysWorked) {
        workPeriodsToUpdate.push(_.assign(lastWeek, { id: originalLastWeek.id }))
      }
    }
  }
  // if intersected WP count is 1, this can result to duplicated WorkPeriods.
  // We should choose the one with higher daysWorked because, it's more likely
  // the WP we applied "first/last one of existent WPs" logic above.
  if (workPeriodsToUpdate.length === 2) {
    if (workPeriodsToUpdate[0].startDate === workPeriodsToUpdate[1].startDate) {
      workPeriodsToUpdate = [_.maxBy(workPeriodsToUpdate, 'daysWorked')]
    }
  }
  if (workPeriodsToRemove.length === 0 && workPeriodsToAdd.length === 0 && workPeriodsToUpdate.length === 0) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'updateWorkPeriods',
      message: `id: ${payload.value.id} resource booking has no change in dates that affect work periods - ignored`
    })
    return
  }
  // Apply deletions first, then creations, then updates.
  if (workPeriodsToRemove.length > 0) {
    await _deleteWorkPeriods(workPeriodsToRemove)
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'updateWorkPeriods',
      message: `Old WorkPeriods deleted for resource booking with id: ${payload.value.id}`
    })
  }
  if (workPeriodsToAdd.length > 0) {
    await _createWorkPeriods(workPeriodsToAdd, payload.value.id)
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'updateWorkPeriods',
      message: `New WorkPeriods created for resource booking with id: ${payload.value.id}`
    })
  }
  if (workPeriodsToUpdate.length > 0) {
    await _updateWorkPeriods(workPeriodsToUpdate)
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'updateWorkPeriods',
      message: `WorkPeriods updated for resource booking with id: ${payload.value.id}`
    })
  }
}
/**
* When a ResourceBooking is deleted, workPeriods related to
* that ResourceBooking should also be deleted
* @param {object} payload the event payload
* @returns {undefined}
*/
async function deleteWorkPeriods (payload) {
  const resourceBookingId = payload.value.id
  // Fetch every work period attached to the deleted resource booking.
  const relatedPeriods = await WorkPeriod.findAll({
    where: { resourceBookingId },
    raw: true
  })
  if (relatedPeriods.length === 0) {
    logger.debug({
      component: 'ResourceBookingEventHandler',
      context: 'deleteWorkPeriods',
      message: `id: ${resourceBookingId} resource booking has no workPeriods - ignored`
    })
    return
  }
  await _deleteWorkPeriods(relatedPeriods)
  logger.debug({
    component: 'ResourceBookingEventHandler',
    context: 'deleteWorkPeriods',
    message: `WorkPeriods deleted for resource booking with id: ${resourceBookingId}`
  })
}
/**
* Calls WorkPeriodService to create workPeriods
* @param {Array<{startDate:Date, endDate:Date}>} periods work period data
* @param {string} resourceBookingId resourceBookingId of work period
* @returns {undefined}
*/
async function _createWorkPeriods (periods, resourceBookingId) {
  // Create sequentially; each period belongs to the same booking.
  for (let i = 0; i < periods.length; i++) {
    const { startDate, endDate, daysWorked } = periods[i]
    await WorkPeriodService.createWorkPeriod({
      resourceBookingId: resourceBookingId,
      startDate,
      endDate,
      daysWorked,
      // Zero worked days means there is nothing to pay for this week.
      paymentStatus: daysWorked === 0 ? AggregatePaymentStatus.NO_DAYS : AggregatePaymentStatus.PENDING
    })
  }
}
/**
* Calls WorkPeriodService to update workPeriods
* @param {Array<{daysWorked:number}>} periods work period data
* @returns {undefined}
*/
async function _updateWorkPeriods (periods) {
  // Sequentially patch daysWorked on each period as the audit M2M user.
  for (const workPeriod of periods) {
    await WorkPeriodService.partiallyUpdateWorkPeriod(
      helper.getAuditM2Muser(),
      workPeriod.id,
      { daysWorked: workPeriod.daysWorked }
    )
  }
}
/**
* Calls WorkPeriodService to delete workPeriods
* @param {Array<{id:string}>} workPeriods work period objects
* @returns {undefined}
*/
async function _deleteWorkPeriods (workPeriods) {
  // Delete one at a time, in the order given.
  for (const { id } of workPeriods) {
    await WorkPeriodService.deleteWorkPeriod(id)
  }
}
/**
* Process resource booking create event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processCreate (payload) {
  // Each handler is awaited sequentially, in this order.
  await placeJobCandidate(payload)
  await assignJob(payload)
  await createWorkPeriods(payload)
  await sendPlacedNotifications(payload)
}
/**
* Process resource booking update event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processUpdate (payload) {
  // Same pipeline as create, but work periods are reconciled, not created.
  await placeJobCandidate(payload)
  await assignJob(payload)
  await updateWorkPeriods(payload)
  await sendPlacedNotifications(payload)
}
/**
* Process resource booking delete event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processDelete (payload) {
  // Only cleanup is needed: remove the booking's work periods.
  await deleteWorkPeriods(payload)
}
// Public event-handler entry points (create/update/delete of ResourceBooking).
module.exports = {
  processCreate,
  processUpdate,
  processDelete
}
<file_sep>/**
* Controller for ResourceBooking endpoints
*/
const HttpStatus = require('http-status-codes')
const _ = require('lodash')
const service = require('../services/ResourceBookingService')
const helper = require('../common/helper')
/**
* Get resourceBooking by id
* @param req the request
* @param res the response
*/
async function getResourceBooking (req, res) {
  const result = await service.getResourceBooking(req.authUser, req.params.id, req.query)
  res.send(result)
}
/**
* Create resourceBooking
* @param req the request
* @param res the response
*/
async function createResourceBooking (req, res) {
  const created = await service.createResourceBooking(req.authUser, req.body)
  res.send(created)
}
/**
* Partially update resourceBooking by id
* @param req the request
* @param res the response
*/
async function partiallyUpdateResourceBooking (req, res) {
  const updated = await service.partiallyUpdateResourceBooking(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
* Fully update resourceBooking by id
* @param req the request
* @param res the response
*/
async function fullyUpdateResourceBooking (req, res) {
  const updated = await service.fullyUpdateResourceBooking(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
* Delete resourceBooking by id
* @param req the request
* @param res the response
*/
async function deleteResourceBooking (req, res) {
  const { id } = req.params
  await service.deleteResourceBooking(req.authUser, id)
  // Deletion has no body; answer 204.
  res.status(HttpStatus.NO_CONTENT).end()
}
/**
* Search resourceBookings
* @param req the request
* @param res the response
*/
async function searchResourceBookings (req, res) {
  // jobIds may arrive in the request body; merge them into the criteria.
  const jobIds = _.get(req, 'body.jobIds', [])
  const criteria = { ...req.query, jobIds }
  const result = await service.searchResourceBookings(req.authUser, criteria)
  // Pagination headers are derived from the search result.
  helper.setResHeaders(req, res, result)
  res.send(result.result)
}
// Controller surface wired up by the route table.
module.exports = {
  getResourceBooking,
  createResourceBooking,
  partiallyUpdateResourceBooking,
  fullyUpdateResourceBooking,
  deleteResourceBooking,
  searchResourceBookings
}
<file_sep>/**
* Create index in Elasticsearch
*/
const config = require('config')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// Index names come from configuration.
const indices = [
  config.get('esConfig.ES_INDEX_JOB'),
  config.get('esConfig.ES_INDEX_JOB_CANDIDATE'),
  config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'),
  config.get('esConfig.ES_INDEX_ROLE')
]
const userPrompt = `WARNING: Are you sure want to create the following elasticsearch indices: ${indices}?`
/**
 * Prompt the operator, then create each index in turn.
 * Exits 1 on the first failure, 0 once all indices are created.
 */
async function createIndex () {
  await helper.promptUser(userPrompt, async () => {
    for (const index of indices) {
      try {
        await helper.createIndex(index, logger)
      } catch (err) {
        logger.logFullError(err, { component: 'createIndex' })
        process.exit(1)
      }
    }
    process.exit(0)
  })
}
createIndex()
<file_sep>/* eslint-disable no-unreachable */
/**
* This service provides operations of ResourceBooking.
*/
const _ = require('lodash')
const Joi = require('joi').extend(require('@joi/date'))
const config = require('config')
const HttpStatus = require('http-status-codes')
const { Op } = require('sequelize')
const { v4: uuid } = require('uuid')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const {
processCreate,
processUpdate,
processDelete
} = require('../esProcessors/ResourceBookingProcessor')
const constants = require('../../app-constants')
const moment = require('moment')
// Model shortcuts and module-level singletons.
const ResourceBooking = models.ResourceBooking
const WorkPeriod = models.WorkPeriod
const WorkPeriodPayment = models.WorkPeriodPayment
const esClient = helper.getESClient()
// Computed once at module load; lists every queryable field name.
const cachedModelFields = _cacheModelFields()
const sequelize = models.sequelize
/**
* Get the fields of the ResourceBooking model and the nested WorkPeriod model
* @returns {Array<string>} array of field names
*/
/**
 * Collect the queryable field names of ResourceBooking plus the nested
 * WorkPeriod and WorkPeriodPayment models (prefixed with their paths).
 * @returns {Array<string>} array of field names
 */
function _cacheModelFields () {
  const fieldsOf = model => _.keys(model.rawAttributes)
  const resourceBookingFields = fieldsOf(ResourceBooking)
  const workPeriodFields = fieldsOf(WorkPeriod).map(key => `workPeriods.${key}`)
  const workPeriodPaymentFields = fieldsOf(WorkPeriodPayment).map(key => `workPeriods.payments.${key}`)
  // The bare collection names are valid "fields" too.
  return _.concat(resourceBookingFields, 'workPeriods', workPeriodFields, 'workPeriods.payments', workPeriodPaymentFields)
}
/**
* Check user scopes for getting workPeriods
* @param {Object} currentUser the user who perform this operation.
* @returns {Boolean} true if user is machine and has read/all workPeriod scopes
*/
/**
 * Whether the caller is an M2M user holding a read/all workPeriod scope.
 * @param {Object} currentUser the user who performs this operation
 * @returns {Boolean} true if allowed to read work periods
 */
function _checkUserScopesForGetWorkPeriods (currentUser) {
  const allowedScopes = [constants.Scopes.READ_WORK_PERIOD, constants.Scopes.ALL_WORK_PERIOD]
  return currentUser.isMachine && helper.checkIfExists(allowedScopes, currentUser.scopes)
}
/**
* Check user scopes for getting workPeriodPayments
* @param {Object} currentUser the user who perform this operation.
* @returns {Boolean} true if user is machine and has read/all workPeriodPayment scopes
*/
/**
 * Whether the caller is an M2M user holding a read/all workPeriodPayment scope.
 * @param {Object} currentUser the user who performs this operation
 * @returns {Boolean} true if allowed to read work period payments
 */
function _checkUserScopesForGetWorkPeriodPayments (currentUser) {
  const allowedScopes = [constants.Scopes.READ_WORK_PERIOD_PAYMENT, constants.Scopes.ALL_WORK_PERIOD_PAYMENT]
  return currentUser.isMachine && helper.checkIfExists(allowedScopes, currentUser.scopes)
}
/**
* Evaluates the criterias and returns the fields
* to be returned as a result of GET endpoints
* @param {Object} currentUser the user who perform this operation.
* @param {Object} criteria the query criterias
* @returns {Object} result
* @returns {Array<string>} result.include field names to include
* @returns {Array<string>} result.fieldsRB ResourceBooking field names to include
* @returns {Array<string>} result.fieldsWP WorkPeriod field names to include
* @returns {Array<string>} result.fieldsWPP WorkPeriodPayment field names to include
* @returns {Array<string>} result.excludeRB ResourceBooking field names to exclude
* @returns {Array<string>} result.excludeWP WorkPeriod field names to exclude
* @returns {Boolean} result.regularUser is current user a regular user?
* @returns {Boolean} result.allWorkPeriods will all WorkPeriod fields be returned?
* @returns {Boolean} result.withWorkPeriods does fields include any WorkPeriod field?
* @returns {Boolean} result.allWorkPeriodPayments will all WorkPeriodPayment fields be returned?
* @returns {Boolean} result.withWorkPeriodPayments does fields include any WorkPeriodPayment field?
* @returns {Boolean} result.sortByWP will the sorting be done by WorkPeriod field?
* @throws {BadRequestError}
* @throws {ForbiddenError}
*/
function _checkCriteriaAndGetFields (currentUser, criteria) {
  // Accumulator for the parsed/validated field and exclusion lists returned to callers.
  const result = {
    include: [],
    fieldsRB: [],
    fieldsWP: [],
    fieldsWPP: [],
    excludeRB: [],
    excludeWP: [],
    excludeWPP: []
  }
  const fields = criteria.fields
  const sort = criteria.sortBy
  // When no "fields" criteria is given, only plain ResourceBooking data is returned.
  const onlyResourceBooking = _.isUndefined(fields)
  // Normalize "fields" into a unique list of trimmed, non-empty field names.
  const query = onlyResourceBooking ? [] : _.uniq(_.filter(_.map(_.split(fields, ','), _.trim), field => !_.isEmpty(field)))
  const notAllowedFields = _.difference(query, cachedModelFields)
  // Check if fields criteria has a field name that RB or WP models don't have
  if (notAllowedFields.length > 0) {
    throw new errors.BadRequestError(`${notAllowedFields} are not allowed`)
  }
  // Check if user is a regular user. Regular users can't get ResourceBookings for which they are not a member
  result.regularUser = !currentUser.hasManagePermission && !currentUser.isMachine && !currentUser.isConnectManager
  // Check if all WorkPeriod fields will be returned
  result.allWorkPeriods = _.some(query, q => q === 'workPeriods')
  // Check if all WorkPeriodPayment fields will be returned
  result.allWorkPeriodPayments = result.allWorkPeriods || _.some(query, q => q === 'workPeriods.payments')
  // Split the fields criteria into ResourceBooking and WorkPeriod fields
  // (field names containing "payments." belong to WorkPeriodPayment, other dotted names to WorkPeriod,
  //  and the rest — except the "workPeriods"/"workPeriods.payments" wildcards — to ResourceBooking)
  _.forEach(query, q => {
    if (_.includes(q, 'payments.')) { result.fieldsWPP.push(q) } else if (q !== 'workPeriods.payments' && _.includes(q, '.')) { result.fieldsWP.push(q) } else if (q !== 'workPeriods' && q !== 'workPeriods.payments') { result.fieldsRB.push(q) }
  })
  // Check if any WorkPeriod field will be returned
  result.withWorkPeriods = result.allWorkPeriods || result.fieldsWP.length > 0 ||
   result.allWorkPeriodPayments || result.fieldsWPP.length > 0
  // Check if any WorkPeriodPayment field will be returned
  result.withWorkPeriodPayments = result.allWorkPeriodPayments || result.fieldsWPP.length > 0
  // Extract the filters from criteria parameter (drop paging/sorting/meta keys)
  let filters = _.filter(Object.keys(criteria), key => _.indexOf(['fromDb', 'fields', 'page', 'perPage', 'sortBy', 'sortOrder', 'jobIds', 'workPeriods.isFirstWeek', 'workPeriods.isLastWeek'], key) === -1)
  filters = _.map(filters, f => {
    if (f === 'projectIds') {
      return 'projectId'
    } return f
  })
  const filterRB = []
  const filterWP = []
  const filterWPP = []
  // Split the filters criteria into ResourceBooking, WorkPeriod and WorkPeriodPayment filters
  _.forEach(filters, q => { if (_.includes(q, 'payments.')) { filterWPP.push(q) } else if (_.includes(q, '.')) { filterWP.push(q) } else { filterRB.push(q) } })
  // Check if filter criteria has any WorkPeriod or payments filter
  const filterHasWorkPeriods = filterWP.length > 0 || filterWPP.length > 0
  // Check if sorting will be done by WorkPeriod field
  result.sortByWP = _.split(sort, '.')[0] === 'workPeriods'
  // Check if the current user has the right to see the memberRate
  const canSeeMemberRate = currentUser.hasManagePermission || currentUser.isMachine
  // If current user has no right to see the memberRate then it's excluded.
  // "currentUser.isMachine" to be true is not enough to return "workPeriods.memberRate"
  // but returning "workPeriod" will be evaluated later
  if (!canSeeMemberRate) {
    result.excludeRB.push('paymentTotal')
    result.excludeWP.push('workPeriods.paymentTotal')
    result.excludeWPP.push('workPeriods.payments')
  }
  // if "fields" is not included in criteria, then only ResourceBooking model will be returned
  // No further evaluation is required as long as the criteria does not include a WorkPeriod filter or a WorkPeriod sorting condition
  if (onlyResourceBooking) {
    if (filterHasWorkPeriods || result.sortByWP) {
      throw new errors.BadRequestError('Can not filter or sort by some field which is not included in fields')
    }
    result.excludeWP.push('workPeriods')
    return result
  }
  // Include sorting condition in filters
  if (result.sortByWP) {
    // It is required to filter by "workPeriods.startDate" or "workPeriods.endDate" if sorting will be done by WorkPeriod field
    if (!_.some(filterWP, f => _.includes(['workPeriods.startDate', 'workPeriods.endDate'], f))) {
      throw new errors.BadRequestError('Can not sort by workPeriod field without filtering by workPeriods.startDate or workPeriods.endDate')
    }
    filterWP.push(sort)
  } else if (!_.isUndefined(sort) && sort !== 'id') {
    filterRB.push(sort)
  }
  // Check If it's tried to filter or sort by some field which should not be included as per rules of fields param
  if (_.difference(filterRB, result.fieldsRB).length > 0) {
    throw new errors.BadRequestError('Can not filter or sort by ResourceBooking field which is not included in fields')
  }
  // Check If it's tried to filter or sort by some field which should not be included as per rules of fields param
  if (!result.allWorkPeriods && _.difference(filterWP, result.fieldsWP).length > 0) {
    throw new errors.BadRequestError('Can not filter or sort by WorkPeriod field which is not included in fields')
  }
  // Check If it's tried to filter or sort by some field which should not be included as per rules of fields param
  if (!result.allWorkPeriodPayments && _.difference(filterWPP, result.fieldsWPP).length > 0) {
    throw new errors.BadRequestError('Can not filter by WorkPeriodPayment field which is not included in fields')
  }
  // Check if the current user has no right to see the memberRate and memberRate is included in fields parameter
  // NOTE(review): only "paymentTotal"/"payments" are excluded above; "memberRate" is rejected here
  // when explicitly requested — confirm that is the intended split of responsibilities.
  if (!canSeeMemberRate && _.some(query, q => _.includes(['memberRate', 'workPeriods.paymentTotal', 'workPeriods.payments'], q))) {
    throw new errors.ForbiddenError('You don\'t have access to view memberRate, paymentTotal and payments')
  }
  // Check if the current user has no right to see the workPeriods and workPeriods is included in fields parameter
  if (currentUser.isMachine && result.withWorkPeriods && !_checkUserScopesForGetWorkPeriods(currentUser)) {
    throw new errors.ForbiddenError('You don\'t have access to view workPeriods')
  }
  // Check if the current user has no right to see the workPeriodPayments and workPeriodPayments is included in fields parameter
  if (currentUser.isMachine && result.withWorkPeriodPayments && !_checkUserScopesForGetWorkPeriodPayments(currentUser)) {
    throw new errors.ForbiddenError('You don\'t have access to view workPeriodPayments')
  }
  result.include.push(...query)
  return result
}
/**
* Check user permission for getting resource booking.
*
* @param {Object} currentUser the user who perform this operation.
* @param {String} projectId the project id
* @returns {undefined}
*/
async function _checkUserPermissionForGetResourceBooking (currentUser, projectId) {
  // Delegate to the shared helper, which rejects when the user is not a member of the project.
  const { userId } = currentUser
  await helper.checkIsMemberOfProject(userId, projectId)
}
/**
* Check if any work period is paid and tried to be deleted
*
* @param {string} resourceBookingId workPeriod object array.
* @param {Object} [oldValue] old value of resourceBooking object.
* @param {Object} [newValue] new value of resourceBooking object.
* @throws {BadRequestError}
*/
async function _ensurePaidWorkPeriodsNotDeleted (resourceBookingId, oldValue, newValue) {
  // Throws if any of the given work periods has a payment in an "active" status.
  function _checkForPaidWorkPeriods (workPeriods) {
    const paidWorkPeriods = _.filter(workPeriods, workPeriod => {
      // filter by WP and WPP status
      return _.some(workPeriod.payments, payment => constants.ActiveWorkPeriodPaymentStatuses.indexOf(payment.status) !== -1)
    })
    if (paidWorkPeriods.length > 0) {
      throw new errors.BadRequestError(`Can't delete associated WorkPeriods ${_.map(paidWorkPeriods, workPeriod => workPeriod.id)}
      as they have associated WorkPeriodsPayment with one of statuses ${constants.ActiveWorkPeriodPaymentStatuses.join(', ')}.`)
    }
  }
  // find related workPeriods to evaluate the changes
  // We don't need to include WPP because WPP's status changes should
  // update WP's status. In case of any bug or slow processing, it's better to check both WP
  // and WPP status for now.
  let workPeriods = await WorkPeriod.findAll({
    where: {
      resourceBookingId: resourceBookingId
    },
    attributes: ['id', 'paymentStatus', 'startDate', 'endDate', 'daysPaid'],
    include: [{
      model: WorkPeriodPayment,
      as: 'payments',
      required: false, // LEFT JOIN: work periods without payments are still returned
      attributes: ['status']
    }]
  })
  // convert Sequelize instances to plain objects before comparing/filtering
  workPeriods = _.map(workPeriods, wp => wp.toJSON())
  // oldValue and newValue are not provided at deleteResourceBooking process
  if (_.isUndefined(oldValue) || _.isUndefined(newValue)) {
    _checkForPaidWorkPeriods(workPeriods)
    return
  }
  // We should not be able to change status of ResourceBooking to 'cancelled'
  // if there is at least one associated Work Period with paymentStatus 'partially-completed', 'completed' or 'in-progress',
  // or any of it's WorkPeriodsPayment has status 'completed' or 'in-progress'.
  if (oldValue.status !== 'cancelled' && newValue.status === 'cancelled') {
    _checkForPaidWorkPeriods(workPeriods)
    // we have already checked all existing workPeriods
    return
  }
  // gather workPeriod dates from provided dates
  // (fall back to the old dates when the update does not change them)
  const newWorkPeriods = helper.extractWorkPeriods(
    _.isUndefined(newValue.startDate) ? oldValue.startDate : newValue.startDate,
    _.isUndefined(newValue.endDate) ? oldValue.endDate : newValue.endDate)
  // find which workPeriods should be removed
  const workPeriodsToRemove = _.differenceBy(workPeriods, newWorkPeriods, 'startDate')
  // we can't delete workperiods with paymentStatus 'partially-completed', 'completed' or 'in-progress',
  // or any of it's WorkPeriodsPayment has status 'completed' or 'in-progress'.
  _checkForPaidWorkPeriods(workPeriodsToRemove)
}
/**
* Get resourceBooking by id
* @param {Object} currentUser the user who perform this operation.
* @param {String} id the resourceBooking id
* @param {Object} criteria object including fields and fromDb criteria
* @returns {Object} the resourceBooking
*/
async function getResourceBooking (currentUser, id, criteria) {
  // Evaluate criteria and extract the fields to be included or excluded
  const queryOpt = _checkCriteriaAndGetFields(currentUser, criteria)
  // We don't allow regular user to exclude projectId from result
  // (projectId is needed below to verify project membership)
  if (queryOpt.regularUser && queryOpt.include.length > 0 && !_.includes(queryOpt.include, 'projectId')) {
    throw new errors.ForbiddenError('Not allowed without including "projectId"')
  }
  // Try Elasticsearch first; on any unexpected ES error, log and fall through to the DB query below.
  if (!criteria.fromDb) {
    try {
      const resourceBooking = await esClient.get({
        index: config.esConfig.ES_INDEX_RESOURCE_BOOKING,
        id,
        _source_includes: [...queryOpt.include],
        _source_excludes: [...queryOpt.excludeRB, ...queryOpt.excludeWP, ...queryOpt.excludeWPP]
      })
      if (queryOpt.regularUser) {
        await _checkUserPermissionForGetResourceBooking(currentUser, resourceBooking.body._source.projectId) // check user permission
      }
      return resourceBooking.body._source
    } catch (err) {
      // a missing ES document means the resource booking does not exist at all
      if (helper.isDocumentMissingException(err)) {
        throw new errors.NotFoundError(`id: ${id} "ResourceBooking" not found`)
      }
      // permission failures must not be masked by the DB fallback
      if (err.httpStatus === HttpStatus.UNAUTHORIZED) {
        throw err
      }
      logger.logFullError(err, { component: 'ResourceBookingService', context: 'getResourceBooking' })
    }
  }
  logger.info({ component: 'ResourceBookingService', context: 'getResourceBooking', message: 'try to query db for data' })
  let resourceBooking = await ResourceBooking.findById(id, queryOpt)
  resourceBooking = resourceBooking.toJSON()
  // omit workPeriod.id if fields criteria has no workPeriod field but have workPeriodPayment field
  if (queryOpt.withWorkPeriods && !queryOpt.allWorkPeriods && (!queryOpt.fieldsWP || queryOpt.fieldsWP.length === 0)) {
    if (_.isArray(resourceBooking.workPeriods)) {
      resourceBooking.workPeriods = _.map(resourceBooking.workPeriods, wp => _.omit(wp, 'id'))
    }
  }
  if (queryOpt.regularUser) {
    await _checkUserPermissionForGetResourceBooking(currentUser, resourceBooking.projectId) // check user permission
  }
  return resourceBooking
}
// Joi validation schema for getResourceBooking parameters.
getResourceBooking.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().guid().required(),
  criteria: Joi.object().keys({
    fromDb: Joi.boolean().default(false), // when true, skip Elasticsearch and query the DB directly
    fields: Joi.string() // comma separated field names, parsed by _checkCriteriaAndGetFields
  })
}).required()
/**
* Create resourceBooking
* @params {Object} currentUser the user who perform this operation
* @params {Object} resourceBooking the resourceBooking to be created
* @returns {Object} the created resourceBooking
*/
async function createResourceBooking (currentUser, resourceBooking) {
  // check permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  if (resourceBooking.jobId) {
    await helper.ensureJobById(resourceBooking.jobId) // ensure job exists
  }
  await helper.ensureUserById(resourceBooking.userId) // ensure user exists
  resourceBooking.id = uuid()
  resourceBooking.createdBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    // create the record and run post-create processing inside one DB transaction
    await sequelize.transaction(async (t) => {
      const created = await ResourceBooking.create(resourceBooking, { transaction: t })
      entity = created.toJSON()
      await processCreate(entity)
    })
  } catch (e) {
    // "entity" is only set once the row was created, so the error event carries real data
    if (entity) {
      // not awaited — presumably best-effort error reporting; confirm this is intentional
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'resourcebooking.create')
    }
    throw e
  }
  // notify downstream consumers only after the transaction committed
  await helper.postEvent(config.TAAS_RESOURCE_BOOKING_CREATE_TOPIC, entity)
  return entity
}
// Joi validation schema for createResourceBooking parameters.
createResourceBooking.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  resourceBooking: Joi.object().keys({
    status: Joi.resourceBookingStatus().default('placed'),
    projectId: Joi.number().integer().required(),
    userId: Joi.string().uuid().required(),
    jobId: Joi.string().uuid().allow(null),
    sendWeeklySurvey: Joi.boolean().default(true),
    startDate: Joi.date().format('YYYY-MM-DD').allow(null),
    // endDate must not precede startDate when both are provided
    endDate: Joi.date().format('YYYY-MM-DD').when('startDate', {
      is: Joi.exist(),
      then: Joi.date().format('YYYY-MM-DD').allow(null).min(Joi.ref('startDate')
      ).messages({
        'date.min': 'endDate cannot be earlier than startDate'
      }),
      otherwise: Joi.date().format('YYYY-MM-DD').allow(null)
    }),
    memberRate: Joi.number().allow(null),
    customerRate: Joi.number().allow(null),
    rateType: Joi.rateType().required(),
    billingAccountId: Joi.number().allow(null)
  }).required()
}).required()
/**
* Update resourceBooking
* @param {Object} currentUser the user who perform this operation
* @param {String} id the resourceBooking id
* @param {Object} data the data to be updated
* @returns {Object} the updated resourceBooking
*/
async function updateResourceBooking (currentUser, id, data) {
  // check permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  const resourceBooking = await ResourceBooking.findById(id)
  // snapshot the pre-update state for validation and for the event payload below
  const oldValue = resourceBooking.toJSON()
  // We can't remove dates of Resource Booking once they are both set
  if (!_.isNil(oldValue.startDate) && !_.isNil(oldValue.endDate) && (_.isNull(data.startDate) || _.isNull(data.endDate))) {
    throw new errors.BadRequestError('You cannot remove start or end date if both are already set for Resource Booking.')
  }
  // before updating the record, we need to check if any paid work periods tried to be deleted
  await _ensurePaidWorkPeriodsNotDeleted(id, oldValue, data)
  data.updatedBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    // apply the update and run post-update processing inside one DB transaction
    await sequelize.transaction(async (t) => {
      const updated = await resourceBooking.update(data, { transaction: t })
      entity = updated.toJSON()
      await processUpdate(entity)
    })
  } catch (e) {
    if (entity) {
      // not awaited — presumably best-effort error reporting; confirm this is intentional
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'resourcebooking.update')
    }
    throw e
  }
  // publish the update event (with the previous state) only after the transaction committed
  await helper.postEvent(config.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC, entity, { oldValue: oldValue })
  return entity
}
/**
* Partially update resourceBooking by id
* @param {Object} currentUser the user who perform this operation
* @param {String} id the resourceBooking id
* @param {Object} data the data to be updated
* @returns {Object} the updated resourceBooking
*/
async function partiallyUpdateResourceBooking (currentUser, id, data) {
  // A PATCH shares the update flow; only its Joi schema (below) restricts the accepted fields.
  const updated = await updateResourceBooking(currentUser, id, data)
  return updated
}
// Joi validation schema for partiallyUpdateResourceBooking parameters (all data fields optional).
partiallyUpdateResourceBooking.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: Joi.object().keys({
    status: Joi.resourceBookingStatus(),
    startDate: Joi.date().format('YYYY-MM-DD').allow(null),
    // endDate must not precede startDate when both are provided
    endDate: Joi.date().format('YYYY-MM-DD').when('startDate', {
      is: Joi.exist(),
      then: Joi.date().format('YYYY-MM-DD').allow(null).min(Joi.ref('startDate')
      ).messages({
        'date.min': 'endDate cannot be earlier than startDate'
      }),
      otherwise: Joi.date().format('YYYY-MM-DD').allow(null)
    }),
    memberRate: Joi.number().allow(null),
    customerRate: Joi.number().allow(null),
    rateType: Joi.rateType(),
    sendWeeklySurvey: Joi.boolean(),
    billingAccountId: Joi.number().allow(null)
  }).required()
}).required()
/**
* Fully update resourceBooking by id
* @param {Object} currentUser the user who perform this operation
* @param {String} id the resourceBooking id
* @param {Object} data the data to be updated
* @returns {Object} the updated resourceBooking
*/
async function fullyUpdateResourceBooking (currentUser, id, data) {
  const { jobId, userId } = data
  // a job reference is optional — verify it only when one is supplied
  if (jobId) {
    await helper.ensureJobById(jobId)
  }
  // the referenced user must always exist
  await helper.ensureUserById(userId)
  return updateResourceBooking(currentUser, id, data)
}
// Joi validation schema for fullyUpdateResourceBooking parameters (PUT: omitted optional fields default to null).
fullyUpdateResourceBooking.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: Joi.object().keys({
    projectId: Joi.number().integer().required(),
    userId: Joi.string().uuid().required(),
    jobId: Joi.string().uuid().allow(null).default(null),
    startDate: Joi.date().format('YYYY-MM-DD').allow(null).default(null),
    // endDate must not precede startDate when both are provided
    endDate: Joi.date().format('YYYY-MM-DD').when('startDate', {
      is: Joi.exist(),
      then: Joi.date().format('YYYY-MM-DD').allow(null).default(null).min(Joi.ref('startDate')
      ).messages({
        'date.min': 'endDate cannot be earlier than startDate'
      }),
      otherwise: Joi.date().format('YYYY-MM-DD').allow(null).default(null)
    }),
    memberRate: Joi.number().allow(null).default(null),
    customerRate: Joi.number().allow(null).default(null),
    rateType: Joi.rateType().required(),
    status: Joi.resourceBookingStatus().required(),
    sendWeeklySurvey: Joi.boolean().default(true),
    billingAccountId: Joi.number().allow(null).default(null)
  }).required()
}).required()
/**
* Delete resourceBooking by id
* @param {Object} currentUser the user who perform this operation
* @param {String} id the resourceBooking id
*/
async function deleteResourceBooking (currentUser, id) {
  // check permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  // we can't delete workperiods with paymentStatus 'partially-completed' or 'completed'.
  // (called without old/new values, so it checks ALL work periods of this booking)
  await _ensurePaidWorkPeriodsNotDeleted(id)
  const resourceBooking = await ResourceBooking.findById(id)
  try {
    // destroy the record and run post-delete processing inside one DB transaction
    await sequelize.transaction(async (t) => {
      await resourceBooking.destroy({ transaction: t })
      await processDelete({ id })
    })
  } catch (e) {
    // not awaited — presumably best-effort error reporting; confirm this is intentional
    helper.postErrorEvent(config.TAAS_ERROR_TOPIC, { id }, 'resourcebooking.delete')
    throw e
  }
  // publish the delete event only after the transaction committed
  await helper.postEvent(config.TAAS_RESOURCE_BOOKING_DELETE_TOPIC, { id })
}
// Joi validation schema for deleteResourceBooking parameters.
deleteResourceBooking.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().guid().required()
}).required()
/**
* List resourceBookings
* @param {Object} currentUser the user who perform this operation.
* @param {Object} criteria the search criteria
* @param {Object} options the extra options to control the function
* @returns {Object} the search result, contain total/page/perPage and result array
*/
async function searchResourceBookings (currentUser, criteria, options) {
  // Evaluate criteria and extract the fields to be included or excluded
  const queryOpt = _checkCriteriaAndGetFields(currentUser, criteria)
  // check user permission
  if (queryOpt.regularUser && !options.returnAll) {
    if (!criteria.projectId) { // regular user can only search with filtering by "projectId"
      throw new errors.ForbiddenError('Not allowed without filtering by "projectId"')
    }
    await _checkUserPermissionForGetResourceBooking(currentUser, criteria.projectId)
  }
  // criteria.projectIds could be an array of ids, or a comma separated string of ids
  // in case it's a comma separated string of ids we have to convert it to an array of ids
  if ((typeof criteria.projectIds) === 'string') {
    criteria.projectIds = criteria.projectIds.trim().split(',').map(projectIdRaw => {
      const projectIdRawTrimmed = projectIdRaw.trim()
      const projectId = Number(projectIdRawTrimmed)
      if (_.isNaN(projectId)) {
        throw new errors.BadRequestError(`projectId ${projectIdRawTrimmed} is not a valid number`)
      }
      return projectId
    })
  }
  // criteria['workPeriods.paymentStatus'] could be an array of paymentStatus, or a comma separated string of paymentStatus
  // in case it's a comma separated string of paymentStatus we have to convert it to an array of paymentStatus
  if ((typeof criteria['workPeriods.paymentStatus']) === 'string') {
    criteria['workPeriods.paymentStatus'] = criteria['workPeriods.paymentStatus'].trim().split(',').map(ps => Joi.attempt({ paymentStatus: ps.trim() }, Joi.object().keys({ paymentStatus: Joi.paymentStatus() })).paymentStatus)
  }
  const page = criteria.page
  let perPage
  if (options.returnAll) {
    // To simplify the logic we are use a very large number for perPage
    // because in practice there could hardly be so many records to be returned.(also consider we are using filters in the meantime)
    // the number is limited by `index.max_result_window`, its default value is 10000, see
    // https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules.html#index-max-result-window
    perPage = 10000
  } else {
    perPage = criteria.perPage
  }
  if (!criteria.sortBy) {
    criteria.sortBy = 'id'
  }
  if (!criteria.sortOrder) {
    criteria.sortOrder = 'desc'
  }
  // a falsy sentSurveyError filter means "no error recorded", which is stored as null
  if (_.has(criteria, 'workPeriods.sentSurveyError') && !criteria['workPeriods.sentSurveyError']) {
    criteria['workPeriods.sentSurveyError'] = null
  }
  // this option to return data from DB is only for internal usage, and it cannot be passed from the endpoint
  if (!options.returnFromDB) {
    try {
      const esQuery = {
        index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'),
        _source_includes: queryOpt.include,
        _source_excludes: [...queryOpt.excludeRB, ...queryOpt.excludeWP, ...queryOpt.excludeWPP],
        body: {
          query: {
            bool: {
              must: [],
              filter: []
            }
          },
          from: (page - 1) * perPage,
          size: perPage,
          sort: []
        }
      }
      // change the date format to match with index schema
      if (criteria.startDate) {
        criteria.startDate = moment(criteria.startDate).format('YYYY-MM-DD')
      }
      if (criteria.endDate) {
        criteria.endDate = moment(criteria.endDate).format('YYYY-MM-DD')
      }
      if (criteria['workPeriods.startDate']) {
        criteria['workPeriods.startDate'] = moment(criteria['workPeriods.startDate']).format('YYYY-MM-DD')
      }
      if (criteria['workPeriods.endDate']) {
        criteria['workPeriods.endDate'] = moment(criteria['workPeriods.endDate']).format('YYYY-MM-DD')
      }
      const sort = { [criteria.sortBy === 'id' ? '_id' : criteria.sortBy]: { order: criteria.sortOrder } }
      if (queryOpt.sortByWP) {
        // nested sorting needs a filter so ES sorts on the matching work period only
        const nestedSortFilter = {}
        if (criteria['workPeriods.startDate']) {
          nestedSortFilter.term = { 'workPeriods.startDate': criteria['workPeriods.startDate'] }
        } else if (criteria['workPeriods.endDate']) {
          nestedSortFilter.term = { 'workPeriods.endDate': criteria['workPeriods.endDate'] }
        }
        sort[criteria.sortBy].nested = { path: 'workPeriods', filter: nestedSortFilter }
      }
      esQuery.body.sort.push(sort)
      // Apply ResourceBooking filters
      _.each(_.pick(criteria, ['sendWeeklySurvey', 'status', 'startDate', 'endDate', 'rateType', 'projectId', 'jobId', 'userId', 'billingAccountId']), (value, key) => {
        esQuery.body.query.bool.must.push({
          term: {
            [key]: {
              value
            }
          }
        })
      })
      // if criteria contains projectIds, filter projectId with this value
      if (criteria.projectIds) {
        esQuery.body.query.bool.filter.push({
          terms: {
            projectId: criteria.projectIds
          }
        })
      }
      // if criteria contains jobIds, filter jobIds with this value
      if (criteria.jobIds && criteria.jobIds.length > 0) {
        esQuery.body.query.bool.filter.push({
          terms: {
            jobId: criteria.jobIds
          }
        })
      }
      // "first week" means the booking starts on or after the given work period start
      if (criteria['workPeriods.isFirstWeek']) {
        esQuery.body.query.bool.must.push({
          range: { startDate: { gte: criteria['workPeriods.startDate'] } }
        })
      }
      // "last week" means the booking ends within the given work period week
      if (criteria['workPeriods.isLastWeek']) {
        esQuery.body.query.bool.must.push({
          range: { endDate: { lte: moment(criteria['workPeriods.startDate']).add(6, 'day').format('YYYY-MM-DD') } }
        })
      }
      // Apply WorkPeriod and WorkPeriodPayment filters
      const workPeriodFilters = _.pick(criteria, ['workPeriods.sentSurveyError', 'workPeriods.sentSurvey', 'workPeriods.paymentStatus', 'workPeriods.startDate', 'workPeriods.endDate', 'workPeriods.userHandle'])
      const workPeriodPaymentFilters = _.pick(criteria, ['workPeriods.payments.status', 'workPeriods.payments.days'])
      if (!_.isEmpty(workPeriodFilters) || !_.isEmpty(workPeriodPaymentFilters)) {
        const workPeriodsMust = []
        _.each(workPeriodFilters, (value, key) => {
          if (key === 'workPeriods.paymentStatus') {
            // paymentStatus is a list of accepted values, so it needs a "terms" query
            workPeriodsMust.push({
              terms: {
                [key]: value
              }
            })
          } else if (key !== 'workPeriods.sentSurveyError') {
            workPeriodsMust.push({
              term: {
                [key]: {
                  value
                }
              }
            })
          }
        })
        const workPeriodPaymentPath = []
        if (!_.isEmpty(workPeriodPaymentFilters)) {
          const workPeriodPaymentsMust = []
          _.each(workPeriodPaymentFilters, (value, key) => {
            workPeriodPaymentsMust.push({
              term: {
                [key]: {
                  value
                }
              }
            })
          })
          // payments are nested one level deeper than work periods
          workPeriodPaymentPath.push({
            nested: {
              path: 'workPeriods.payments',
              query: { bool: { must: workPeriodPaymentsMust } }
            }
          })
        }
        esQuery.body.query.bool.must.push({
          nested: {
            path: 'workPeriods',
            query: {
              bool: {
                must: [...workPeriodsMust, ...workPeriodPaymentPath]
              }
            }
          }
        })
      }
      logger.debug({ component: 'ResourceBookingService', context: 'searchResourceBookings', message: `Query: ${JSON.stringify(esQuery)}` })
      const { body } = await esClient.search(esQuery)
      const resourceBookings = _.map(body.hits.hits, '_source')
      // ESClient will return ResourceBookings with it's all nested WorkPeriods
      // We re-apply WorkPeriod filters except userHandle because all WPs share same userHandle
      if (!_.isEmpty(workPeriodFilters) || !_.isEmpty(workPeriodPaymentFilters)) {
        _.each(resourceBookings, r => {
          r.workPeriods = _.filter(r.workPeriods, wp => {
            return _.every(_.omit(workPeriodFilters, 'workPeriods.userHandle'), (value, key) => {
              key = key.split('.')[1]
              if (key === 'sentSurveyError' && !workPeriodFilters['workPeriods.sentSurveyError']) {
                return !wp[key]
              } else if (key === 'paymentStatus') {
                return _.includes(value, wp[key])
              } else {
                return wp[key] === value
              }
            }) && _.every(workPeriodPaymentFilters, (value, key) => {
              key = key.split('.')[2]
              wp.payments = _.filter(wp.payments, payment => payment[key] === value)
              return wp.payments.length > 0
            })
          })
        })
      }
      // sort Work Periods inside Resource Bookings by startDate just for comfort output
      _.each(resourceBookings, r => {
        if (_.isArray(r.workPeriods)) {
          r.workPeriods = _.sortBy(r.workPeriods, ['startDate'])
        }
      })
      return {
        total: body.hits.total.value,
        page,
        perPage,
        result: resourceBookings
      }
    } catch (err) {
      // any ES failure falls through to the DB query below
      logger.logFullError(err, { component: 'ResourceBookingService', context: 'searchResourceBookings' })
    }
  }
  logger.info({ component: 'ResourceBookingService', context: 'searchResourceBookings', message: 'fallback to DB query' })
  const filter = { [Op.and]: [] }
  // Apply ResourceBooking filters
  _.each(_.pick(criteria, ['sendWeeklySurvey', 'status', 'startDate', 'endDate', 'rateType', 'projectId', 'jobId', 'userId']), (value, key) => {
    filter[Op.and].push({ [key]: value })
  })
  // billingAccountId === 0 is the API's way of asking for "no billing account" (NULL in DB)
  if (!_.isUndefined(criteria.billingAccountId)) {
    filter[Op.and].push({ billingAccountId: criteria.billingAccountId === 0 ? null : criteria.billingAccountId })
  }
  if (criteria.projectIds) {
    filter[Op.and].push({ projectId: criteria.projectIds })
  }
  if (criteria.jobIds && criteria.jobIds.length > 0) {
    // filter by the associated job ids
    // (fixed: previously filtered the "id" column, diverging from the ES path which filters "jobId")
    filter[Op.and].push({ jobId: criteria.jobIds })
  }
  if (criteria['workPeriods.isFirstWeek']) {
    filter[Op.and].push({ startDate: { [Op.gte]: criteria['workPeriods.startDate'] } })
  }
  if (criteria['workPeriods.isLastWeek']) {
    filter[Op.and].push({ endDate: { [Op.lte]: moment(criteria['workPeriods.startDate']).add(6, 'day').format('YYYY-MM-DD') } })
  }
  const queryCriteria = {
    where: filter,
    offset: ((page - 1) * perPage),
    limit: perPage
  }
  // Select ResourceBooking fields
  if (queryOpt.include.length > 0) {
    queryCriteria.attributes = queryOpt.fieldsRB
  } else if (queryOpt.excludeRB && queryOpt.excludeRB.length > 0) {
    queryCriteria.attributes = { exclude: queryOpt.excludeRB }
  }
  // Include WorkPeriod Model
  if (queryOpt.withWorkPeriods) {
    queryCriteria.include = [{
      model: WorkPeriod,
      as: 'workPeriods',
      required: false,
      where: { [Op.and]: [] }
    }]
    // Select WorkPeriod fields
    if (!queryOpt.allWorkPeriods) {
      if (queryOpt.fieldsWP && queryOpt.fieldsWP.length > 0) {
        queryCriteria.include[0].attributes = _.map(queryOpt.fieldsWP, f => _.split(f, '.')[1])
      } else {
        // we should include at least one workPeriod field
        // if fields criteria has no workPeriod field but have workPeriodPayment field
        queryCriteria.include[0].attributes = ['id']
      }
    } else if (queryOpt.excludeWP && queryOpt.excludeWP.length > 0) {
      queryCriteria.include[0].attributes = { exclude: _.map(queryOpt.excludeWP, f => _.split(f, '.')[1]) }
    }
    // Apply WorkPeriod filters
    _.each(_.pick(criteria, ['workPeriods.sentSurveyError', 'workPeriods.sentSurvey', 'workPeriods.startDate', 'workPeriods.endDate', 'workPeriods.paymentStatus']), (value, key) => {
      key = key.split('.')[1]
      queryCriteria.include[0].where[Op.and].push({ [key]: value })
    })
    if (criteria['workPeriods.userHandle']) {
      queryCriteria.include[0].where[Op.and].push({ userHandle: { [Op.iLike]: criteria['workPeriods.userHandle'] } })
    }
    // any WorkPeriod filter turns the LEFT JOIN into an INNER JOIN
    if (queryCriteria.include[0].where[Op.and].length > 0) {
      queryCriteria.include[0].required = true
    }
    // Include WorkPeriodPayment Model
    if (queryOpt.withWorkPeriodPayments) {
      queryCriteria.include[0].include = [{
        model: WorkPeriodPayment,
        as: 'payments',
        required: false,
        where: { [Op.and]: [] }
      }]
      // Select WorkPeriodPayment fields
      if (!queryOpt.allWorkPeriodPayments) {
        queryCriteria.include[0].include[0].attributes = _.map(queryOpt.fieldsWPP, f => _.split(f, '.')[2])
      } else if (queryOpt.excludeWPP && queryOpt.excludeWPP.length > 0) {
        queryCriteria.include[0].include[0].attributes = { exclude: _.map(queryOpt.excludeWPP, f => _.split(f, '.')[2]) }
      }
      // Apply WorkPeriodPayment filters
      _.each(_.pick(criteria, ['workPeriods.payments.status', 'workPeriods.payments.days']), (value, key) => {
        key = key.split('.')[2]
        queryCriteria.include[0].include[0].where[Op.and].push({ [key]: value })
      })
      // any WorkPeriodPayment filter turns both JOINs into INNER JOINs
      if (queryCriteria.include[0].include[0].where[Op.and].length > 0) {
        queryCriteria.include[0].required = true
        queryCriteria.include[0].include[0].required = true
      }
    }
  }
  // Apply sorting criteria
  if (!queryOpt.sortByWP) {
    queryCriteria.order = [[criteria.sortBy, `${criteria.sortOrder} NULLS LAST`]]
  } else {
    // sorting by a nested WorkPeriod column requires disabling Sequelize's subquery wrapping
    queryCriteria.subQuery = false
    queryCriteria.order = [[{ model: WorkPeriod, as: 'workPeriods' }, _.split(criteria.sortBy, '.')[1], `${criteria.sortOrder} NULLS LAST`]]
  }
  const resultModel = await ResourceBooking.findAll(queryCriteria)
  const result = _.map(resultModel, r => r.toJSON())
  // omit workPeriod.id if fields criteria has no workPeriod field but have workPeriodPayment field
  if (queryOpt.withWorkPeriods && !queryOpt.allWorkPeriods && (!queryOpt.fieldsWP || queryOpt.fieldsWP.length === 0)) {
    _.each(result, r => {
      if (_.isArray(r.workPeriods)) {
        r.workPeriods = _.map(r.workPeriods, wp => _.omit(wp, 'id'))
      }
    })
  }
  // sort Work Periods inside Resource Bookings by startDate just for comfort output
  _.each(result, r => {
    if (_.isArray(r.workPeriods)) {
      r.workPeriods = _.sortBy(r.workPeriods, ['startDate'])
    }
  })
  // count distinct bookings; the include is only needed when it constrains the result set
  let countQuery = _.omit(queryCriteria, ['limit', 'offset', 'attributes', 'order'])
  if (queryOpt.withWorkPeriods && !queryCriteria.include[0].required) {
    countQuery = _.omit(countQuery, ['include'])
  }
  countQuery.subQuery = false
  countQuery.group = ['ResourceBooking.id']
  const total = await ResourceBooking.count(countQuery)
  return {
    fromDb: true,
    total: total.length,
    page,
    perPage,
    result
  }
}
// Joi validation schema for searchResourceBookings().
// Keys prefixed with "workPeriods." filter/sort on the nested WorkPeriod records;
// keys prefixed with "workPeriods.payments." filter on nested WorkPeriodPayment records.
searchResourceBookings.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  criteria: Joi.object().keys({
    // comma-separated list of fields to include in the response
    fields: Joi.string(),
    page: Joi.page(),
    perPage: Joi.perPage(),
    sortBy: Joi.string().valid('id', 'rateType', 'startDate', 'endDate', 'customerRate', 'memberRate', 'status',
      'workPeriods.userHandle', 'workPeriods.daysWorked', 'workPeriods.daysPaid', 'workPeriods.paymentTotal', 'workPeriods.paymentStatus'),
    sortOrder: Joi.string().valid('desc', 'asc'),
    status: Joi.resourceBookingStatus(),
    startDate: Joi.date().format('YYYY-MM-DD'),
    endDate: Joi.date().format('YYYY-MM-DD'),
    rateType: Joi.rateType(),
    jobId: Joi.string().uuid(),
    jobIds: Joi.array().items(Joi.string().uuid()),
    userId: Joi.string().uuid(),
    projectId: Joi.number().integer(),
    // either a comma-separated string or an array of project ids
    projectIds: Joi.alternatives(
      Joi.string(),
      Joi.array().items(Joi.number().integer())
    ),
    sendWeeklySurvey: Joi.boolean(),
    billingAccountId: Joi.number().integer(),
    // either a comma-separated string or an array of payment statuses
    'workPeriods.paymentStatus': Joi.alternatives(
      Joi.string(),
      Joi.array().items(Joi.paymentStatus())
    ),
    // work periods are Sunday-to-Saturday weeks: startDate must fall on Sunday (getDay() === 0)
    'workPeriods.startDate': Joi.date().format('YYYY-MM-DD').custom((value, helpers) => {
      const date = new Date(value)
      const weekDay = date.getDay()
      if (weekDay !== 0) {
        return helpers.message('workPeriods.startDate should be always Sunday')
      }
      return value
    }),
    // endDate must fall on Saturday (getDay() === 6)
    'workPeriods.endDate': Joi.date().format('YYYY-MM-DD').custom((value, helpers) => {
      const date = new Date(value)
      const weekDay = date.getDay()
      if (weekDay !== 6) {
        return helpers.message('workPeriods.endDate should be always Saturday')
      }
      return value
    }),
    'workPeriods.userHandle': Joi.string(),
    'workPeriods.sentSurvey': Joi.boolean(),
    'workPeriods.sentSurveyError': Joi.object().keys({
      errorCode: Joi.number().integer().min(0),
      errorMessage: Joi.string()
    }).allow('').optional(),
    // isFirstWeek/isLastWeek are only meaningful relative to a startDate filter,
    // so they are rejected (only `false` allowed) when startDate is absent
    'workPeriods.isFirstWeek': Joi.when(Joi.ref('workPeriods.startDate', { separator: false }), {
      is: Joi.exist(),
      then: Joi.boolean().default(false),
      otherwise: Joi.boolean().valid(false).messages({
        'any.only': 'Cannot filter by "isFirstWeek" without "startDate"'
      })
    }),
    'workPeriods.isLastWeek': Joi.boolean().when(Joi.ref('workPeriods.startDate', { separator: false }), {
      is: Joi.exist(),
      then: Joi.boolean().default(false),
      otherwise: Joi.boolean().valid(false).messages({
        'any.only': 'Cannot filter by "isLastWeek" without "startDate"'
      })
    }),
    'workPeriods.payments.status': Joi.workPeriodPaymentStatus(),
    'workPeriods.payments.days': Joi.number().integer().min(0).max(10)
  }).required(),
  options: Joi.object().keys({
    returnAll: Joi.boolean().default(false),
    returnFromDB: Joi.boolean().default(false)
  }).default({
    returnAll: false,
    returnFromDB: false
  })
}).required()
// Public API of the ResourceBooking service (consumed by controllers/routes)
module.exports = {
  getResourceBooking,
  createResourceBooking,
  partiallyUpdateResourceBooking,
  fullyUpdateResourceBooking,
  deleteResourceBooking,
  searchResourceBookings
}
<file_sep>version: '3'
services:
  # The TaaS API itself, built from the local Dockerfile and wired to the
  # postgres and elasticsearch containers below via compose DNS names.
  taas-apis:
    container_name: taas-apis
    build:
      context: .
      dockerfile: ./docker/Dockerfile
    environment:
      - DATABASE_URL=postgres://postgres:postgres@postgres:5432/postgres
      - ES_HOST=http://elasticsearch:9200
    ports:
      - '3000:3000'
    depends_on:
      - postgres
      - elasticsearch
    restart: always
  # PostgreSQL database (default credentials — for local development only)
  postgres:
    image: postgres
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    ports:
      - 5432:5432
  # Single-node Elasticsearch cluster used as the read-side search index
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.9.2
    environment:
      - discovery.type=single-node
    ports:
      - 9200:9200
<file_sep>Recruit CRM Job Data Sync Script
===
# Configuration
Configuration file is at `./scripts/recruit-crm-job-sync/config.js`.
# Usage
``` bash
node scripts/recruit-crm-job-sync <pathname-to-a-csv-file>
```
By default the script updates jobs via `TC_API`.
# Example
1. Follow the README for `taas-apis` to deploy Taas API locally
2. Create two jobs via `Jobs > create job with booking manager` in Postman, with external ids `51913016` and `51902826` for each of the jobs.
**NOTE**: The external ids `51913016` and `51902826` could be found at `scripts/recruit-crm-job-sync/example_data.csv` under the Slug column.
3. Configure env variable `RCRM_SYNC_TAAS_API_URL` so that the script could make use of the local API:
``` bash
export RCRM_SYNC_TAAS_API_URL=http://localhost:3000/api/v5
```
4. Run the script against the sample CSV file and pipe the output from the script to a temporary file:
``` bash
node scripts/recruit-crm-job-sync scripts/recruit-crm-job-sync/example_data.csv | tee /tmp/report.txt
```
The output should be like this:
``` bash
DEBUG: processing line #1 - {"ID":"1","Name":"Data job Engineer","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"1","Maximum Experience In Years":"3","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"2","Job Status":"Closed","Company":"company 1","Contact":" ","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123456","Createdby":"abc","Created On":"02-Jun-20","Updated By":"abc","Updated On":"17-Feb-21","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51892637","_lnum":1}
ERROR: #1 - [EXTERNAL_ID_NOT_FOUND] externalId: 51892637 job not found
DEBUG: processed line #1
DEBUG: processing line #2 - {"ID":"2","Name":"<NAME>","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"2","Maximum Experience In Years":"5","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"10","Job Status":"Closed","Company":"company 2","Contact":"abc","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123457","Createdby":"abc","Created On":"02-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51913016","_lnum":2}
DEBUG: jobId: 34cee9aa-e45f-47ed-9555-ffd3f7196fec isApplicationPageActive(current): false - isApplicationPageActive(to be synced): true
INFO: #2 - id: 34cee9aa-e45f-47ed-9555-ffd3f7196fec isApplicationPageActive: true "job" updated
DEBUG: processed line #2
DEBUG: processing line #3 - {"ID":"3","Name":"<NAME>","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"3","Maximum Experience In Years":"7","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"4","Job Status":"Canceled","Company":"company 3","Contact":" ","Currency":"$","allowApply":"No","Collaborator":"","Locality":"","City":"","Job Code":"J123458","Createdby":"abc","Created On":"04-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51902826","_lnum":3}
DEBUG: jobId: 4acde317-c364-4b79-aa77-295b98143c8b isApplicationPageActive(current): false - isApplicationPageActive(to be synced): false
WARN: #3 - isApplicationPageActive is already set
DEBUG: processed line #3
DEBUG: processing line #4 - {"ID":"5","Name":"Data Engineers and Data Architects","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"4","Maximum Experience In Years":"9","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"8","Job Status":"Closed","Company":"company 4","Contact":" ","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123459","Createdby":"abc","Created On":"09-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51811161","_lnum":4}
ERROR: #4 - [EXTERNAL_ID_NOT_FOUND] externalId: 51811161 job not found
DEBUG: processed line #4
DEBUG: processing line #5 - {"ID":"6","Name":"<NAME>","Description":"Java & J2EE or Python, Docker, Kubernetes, AWS or GCP","Qualification":"","Specialization":"","Minimum Experience In Years":"5","Maximum Experience In Years":"10","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"5","Job Status":"Closed","Company":"company 5","Contact":" ","Currency":"$","allowApply":"No","Collaborator":"","Locality":"","City":"","Job Code":"J123460","Createdby":"abc","Created On":"12-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51821342","_lnum":5}
ERROR: #5 - [EXTERNAL_ID_NOT_FOUND] externalId: 51821342 job not found
DEBUG: processed line #5
DEBUG: processing line #6 - {"ID":"7","Name":"<NAME>","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"0","Maximum Experience In Years":"0","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"2","Job Status":"Closed","Company":"company 6","Contact":"abc","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123461","Createdby":"abc","Created On":"12-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51831524","_lnum":6}
ERROR: #6 - [EXTERNAL_ID_NOT_FOUND] externalId: 51831524 job not found
DEBUG: processed line #6
DEBUG: processing line #7 - {"ID":"","Name":"","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"","Maximum Experience In Years":"","Minimum Annual Salary":"","Maximum Annual Salary":"","Number Of Openings":"","Job Status":"","Company":"","Contact":"","Currency":"","allowApply":"","Collaborator":"","Locality":"","City":"","Job Code":"","Createdby":"","Created On":"","Updated By":"","Updated On":"","Owner":"","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"","_lnum":7}
ERROR: #7 - "allowApply" must be one of [Yes, No]
DEBUG: processed line #7
INFO: === summary ===
INFO: No. of records read = 7
INFO: No. of records updated for field isApplicationPageActive = true = 1
INFO: No. of records updated for field isApplicationPageActive = false = 0
INFO: No. of records : externalId not found = 4
INFO: No. of records failed(all) = 5
INFO: No. of records failed(excluding "externalId not found") = 1
INFO: No. of records skipped = 1
INFO: done!
```
The following command could be used to extract the summary from the output:
``` bash
cat /tmp/report.txt | grep 'No. of records' | cut -d' ' -f2-
```
To list all skipped lines:
``` bash
cat /tmp/report.txt | grep 'WARN' -B 3
```
<file_sep>const config = require('config')
/*
 * Create the `role_search_requests` table.
 * Each row captures a member's request to be matched against a Role;
 * `roleId` is a FK into `roles` and `previousRoleSearchRequestId` lets
 * requests be chained to an earlier request.
 */
module.exports = {
  up: async (queryInterface, Sequelize) => {
    await queryInterface.createTable('role_search_requests', {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      memberId: {
        field: 'member_id',
        type: Sequelize.UUID
      },
      // optional link to a prior request in the same search chain
      previousRoleSearchRequestId: {
        field: 'previous_role_search_request_id',
        type: Sequelize.UUID
      },
      roleId: {
        field: 'role_id',
        type: Sequelize.UUID,
        references: {
          model: {
            tableName: 'roles',
            schema: config.DB_SCHEMA_NAME
          },
          key: 'id'
        },
        onUpdate: 'CASCADE',
        // keep the request row if the referenced role is removed
        onDelete: 'SET NULL'
      },
      jobDescription: {
        field: 'job_description',
        type: Sequelize.STRING()
      },
      // skill ids associated with the request
      skills: {
        type: Sequelize.ARRAY({
          type: Sequelize.UUID
        })
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      // soft-delete timestamp (paranoid mode)
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    }, {
      schema: config.DB_SCHEMA_NAME
    })
  },
  down: async (queryInterface, Sequelize) => {
    await queryInterface.dropTable({
      tableName: 'role_search_requests',
      schema: config.DB_SCHEMA_NAME
    })
  }
}
<file_sep>/**
* This service provides operations of WorkPeriod.
*/
const _ = require('lodash')
const Joi = require('joi').extend(require('@joi/date'))
const config = require('config')
const HttpStatus = require('http-status-codes')
const { Op } = require('sequelize')
const uuid = require('uuid')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const {
processCreate,
processUpdate,
processDelete
} = require('../esProcessors/WorkPeriodProcessor')
const constants = require('../../app-constants')
const moment = require('moment')
const WorkPeriod = models.WorkPeriod
const esClient = helper.getESClient()
const sequelize = models.sequelize
// "startDate" and "endDate" should always represent one week:
// "startDate" should be always Monday and "endDate" should be always Sunday of the same week.
// It should not include time or timezone, only date.
Joi.workPeriodStartDate = () => Joi.date().format('YYYY-MM-DD').custom((value, helpers) => {
const date = new Date(value)
const weekDay = date.getDay()
if (weekDay !== 0) {
return helpers.message('startDate should be always Sunday')
}
return value
})
// Validator for a work period's endDate (required):
// - when startDate is present, endDate must equal startDate + 6 days (the next Saturday)
// - when startDate is absent, endDate is required and must fall on a Saturday
Joi.workPeriodEndDate = () => {
  const saturdayOfSameWeek = Joi.ref('startDate', {
    adjust: (value) => {
      const date = new Date(value)
      date.setDate(date.getDate() + 6)
      return date
    }
  })
  return Joi.date().when('startDate', {
    is: Joi.exist(),
    then: Joi.date().format('YYYY-MM-DD').equal(saturdayOfSameWeek).messages({
      'any.only': 'endDate should be always the next Saturday'
    }),
    otherwise: Joi.date().format('YYYY-MM-DD').custom((value, helpers) => {
      if (new Date(value).getDay() !== 6) {
        return helpers.message('endDate should be always Saturday')
      }
      return value
    }).required()
  })
}
// Same as Joi.workPeriodEndDate but the standalone endDate is optional:
// - when startDate is present, endDate must equal startDate + 6 days (the next Saturday)
// - when startDate is absent, endDate may be omitted, but if given must fall on a Saturday
Joi.workPeriodEndDateOptional = () => {
  const saturdayOfSameWeek = Joi.ref('startDate', {
    adjust: (value) => {
      const date = new Date(value)
      date.setDate(date.getDate() + 6)
      return date
    }
  })
  return Joi.date().when('startDate', {
    is: Joi.exist(),
    then: Joi.date().format('YYYY-MM-DD').equal(saturdayOfSameWeek).messages({
      'any.only': 'endDate should be always the next Saturday'
    }),
    otherwise: Joi.date().format('YYYY-MM-DD').custom((value, helpers) => {
      if (new Date(value).getDay() !== 6) {
        return helpers.message('endDate should be always Saturday')
      }
      return value
    })
  })
}
/**
 * Check whether the current user may read work period payments.
 * Only M2M (machine) users qualify, and they must carry either the
 * read-payment or all-payment scope.
 * @param {Object} currentUser the user who performs this operation
 * @returns {Boolean} true if user is machine and has read/all payment scopes
 */
function _checkUserScopesForGetPayments (currentUser) {
  const requiredScopes = [
    constants.Scopes.READ_WORK_PERIOD_PAYMENT,
    constants.Scopes.ALL_WORK_PERIOD_PAYMENT
  ]
  return currentUser.isMachine && helper.checkIfExists(requiredScopes, currentUser.scopes)
}
/**
 * Build the field-visibility options for work period queries based on the
 * caller's permissions: non-privileged users never see `paymentTotal`, and
 * only managers or suitably-scoped M2M users get the nested `payments`.
 * @param {Object} currentUser the user who performs this operation
 * @returns {Object} queryOpt
 * @returns {Object} queryOpt.excludeES fields to exclude from the ES query
 * @returns {Object} queryOpt.excludeDB fields to exclude from the DB query
 * @returns {Object} queryOpt.withPayments whether the payments field is included
 */
function _getWorkPeriodFilteringFields (currentUser) {
  const excludeES = []
  const excludeDB = []
  let withPayments = false
  // hide payment totals from everyone except managers and machine users
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    excludeES.push('workPeriods.paymentTotal')
    excludeDB.push('paymentTotal')
  }
  // nested payments are visible to managers, or to M2M users with payment scopes
  if (currentUser.hasManagePermission || _checkUserScopesForGetPayments(currentUser)) {
    withPayments = true
  } else {
    excludeES.push('workPeriods.payments')
  }
  return { excludeES, excludeDB, withPayments }
}
/**
 * Check user permission for getting a work period.
 * Managers, machine users and connect managers may read any work period;
 * everyone else must be a member of the owning project.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} projectId the project id
 * @returns {undefined}
 */
async function _checkUserPermissionForGetWorkPeriod (currentUser, projectId) {
  const isPrivileged = currentUser.hasManagePermission || currentUser.isMachine || currentUser.isConnectManager
  if (isPrivileged) {
    return
  }
  // throws if the user does not belong to the project
  await helper.checkIsMemberOfProject(currentUser.userId, projectId)
}
/**
 * Check user permission for creating or updating a work period.
 * Only managers and machine users may write; anyone else is rejected.
 * @param {Object} currentUser the user who performs this operation
 * @throws {errors.ForbiddenError} when the user lacks write permission
 * @returns {undefined}
 */
async function _checkUserPermissionForWriteWorkPeriod (currentUser) {
  const canWrite = currentUser.hasManagePermission || currentUser.isMachine
  if (!canWrite) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
}
/**
 * If exactly one of startDate/endDate is present, derive the other so the
 * pair spans one week (end = start + 6 days, start = end - 6 days).
 * Mutates `data` in place; does nothing when both or neither date is set.
 * @param {Object} data workPeriod data object
 */
function _autoCalculateDates (data) {
  const hasStart = Boolean(data.startDate)
  const hasEnd = Boolean(data.endDate)
  if (hasStart === hasEnd) {
    return // both present or both absent — nothing to derive
  }
  if (hasStart) {
    const derivedEnd = new Date(data.startDate)
    derivedEnd.setDate(derivedEnd.getDate() + 6)
    data.endDate = derivedEnd
  } else {
    const derivedStart = new Date(data.endDate)
    derivedStart.setDate(derivedStart.getDate() - 6)
    data.startDate = derivedStart
  }
}
/**
 * Get workPeriod by id.
 * Tries Elasticsearch first (the work period lives nested inside its
 * ResourceBooking document); on any non-auth ES failure it falls back to
 * the database. Fields visible in the result depend on the caller's
 * permissions (see _getWorkPeriodFilteringFields).
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} id the workPeriod id
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the workPeriod
 */
async function getWorkPeriod (currentUser, id, fromDb = false) {
  // get query options according to currentUser
  const queryOpt = _getWorkPeriodFilteringFields(currentUser)
  if (!fromDb) {
    try {
      // search ResourceBooking documents whose nested workPeriods contain this id
      const resourceBooking = await esClient.search({
        index: config.esConfig.ES_INDEX_RESOURCE_BOOKING,
        _source_includes: 'workPeriods',
        _source_excludes: queryOpt.excludeES,
        body: {
          query: {
            nested: {
              path: 'workPeriods',
              query: {
                match: { 'workPeriods.id': id }
              }
            }
          }
        }
      })
      if (!resourceBooking.body.hits.total.value) {
        throw new errors.NotFoundError()
      }
      // the hit contains ALL nested work periods — pick the requested one
      const workPeriod = _.find(resourceBooking.body.hits.hits[0]._source.workPeriods, { id })
      await _checkUserPermissionForGetWorkPeriod(currentUser, workPeriod.projectId) // check user permission
      return workPeriod
    } catch (err) {
      if (helper.isDocumentMissingException(err)) {
        throw new errors.NotFoundError(`id: ${id} "WorkPeriod" not found`)
      }
      if (err.httpStatus === HttpStatus.UNAUTHORIZED) {
        throw err
      }
      // NOTE: any other ES error (including the NotFoundError thrown above)
      // is logged and we deliberately fall through to the DB query below
      logger.logFullError(err, { component: 'WorkPeriodService', context: 'getWorkPeriod' })
    }
  }
  // DB fallback (or explicit fromDb request)
  logger.info({ component: 'WorkPeriodService', context: 'getWorkPeriod', message: 'try to query db for data' })
  const workPeriod = await WorkPeriod.findById(id, { withPayments: queryOpt.withPayments, exclude: queryOpt.excludeDB })
  await _checkUserPermissionForGetWorkPeriod(currentUser, workPeriod.projectId) // check user permission
  return workPeriod.dataValues
}
getWorkPeriod.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().guid().required(),
  fromDb: Joi.boolean()
}).required()
/**
 * Create workPeriod.
 * Derives a missing startDate/endDate, denormalizes projectId and the
 * member's handle onto the record, then persists it and updates the ES
 * projection inside one DB transaction. On failure after the entity was
 * built, an error event is posted so the ES index can be reconciled.
 * @param {Object} workPeriod the workPeriod to be created
 * @returns {Object} the created workPeriod
 */
async function createWorkPeriod (workPeriod) {
  // If one of the dates are missing then auto-calculate it
  _autoCalculateDates(workPeriod)
  const resourceBooking = await helper.ensureResourceBookingById(workPeriod.resourceBookingId) // ensure resource booking exists
  workPeriod.projectId = resourceBooking.projectId
  const user = await helper.ensureUserById(resourceBooking.userId) // ensure user exists
  workPeriod.userHandle = user.handle
  workPeriod.id = uuid.v4()
  workPeriod.createdBy = config.m2m.M2M_AUDIT_USER_ID
  // routing key groups events by the parent resource booking
  const key = `resourceBooking.id:${workPeriod.resourceBookingId}`
  let entity
  try {
    await sequelize.transaction(async (t) => {
      const created = await WorkPeriod.create(workPeriod, { transaction: t })
      entity = created.toJSON()
      // update the ES projection within the same transaction scope
      await processCreate({ ...entity, key })
    })
  } catch (err) {
    if (entity) {
      // DB row was created but ES processing failed — signal for reconciliation
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiod.create')
    }
    if (!_.isUndefined(err.original)) {
      // surface DB constraint violations as a 400 with the driver's detail
      throw new errors.BadRequestError(err.original.detail)
    } else {
      throw err
    }
  }
  await helper.postEvent(config.TAAS_WORK_PERIOD_CREATE_TOPIC, entity, { key })
  return entity
}
createWorkPeriod.schema = Joi.object().keys({
  workPeriod: Joi.object().keys({
    resourceBookingId: Joi.string().uuid().required(),
    startDate: Joi.workPeriodStartDate(),
    endDate: Joi.workPeriodEndDate(),
    sentSurvey: Joi.boolean().default(false),
    daysWorked: Joi.number().integer().min(0).max(5).required(),
    // payment fields are server-managed and may not be supplied by callers
    daysPaid: Joi.number().default(0).forbidden(),
    paymentTotal: Joi.number().default(0).forbidden(),
    paymentStatus: Joi.paymentStatus().required()
  }).required()
}).required()
/**
 * Update workPeriod.
 * Validates that daysWorked is not reduced below what was already paid and
 * that the period's dates still line up with its resource booking's weeks,
 * recalculates paymentStatus, then persists the change and updates the ES
 * projection in one transaction.
 * @param {Object} currentUser the user who perform this operation
 * @param {String} id the workPeriod id
 * @param {Object} data the data to be updated
 * @throws {errors.BadRequestError} when daysWorked would drop below daysPaid
 * @throws {errors.ConflictError} when dates disagree with the resource booking
 * @returns {Object} the updated workPeriod
 */
async function updateWorkPeriod (currentUser, id, data) {
  // check permission
  await _checkUserPermissionForWriteWorkPeriod(currentUser)
  const workPeriod = await WorkPeriod.findById(id, { withPayments: true })
  const oldValue = workPeriod.toJSON()
  // never allow reported work to fall below what has already been paid
  if (data.daysWorked < oldValue.daysPaid) {
    throw new errors.BadRequestError(`Cannot update daysWorked (${data.daysWorked}) to the value less than daysPaid (${oldValue.daysPaid})`)
  }
  const resourceBooking = await helper.ensureResourceBookingById(oldValue.resourceBookingId)
  // the booking's date range expands into its constituent weeks
  const weeks = helper.extractWorkPeriods(resourceBooking.startDate, resourceBooking.endDate)
  if (_.isEmpty(weeks)) {
    throw new errors.ConflictError('Resource booking has missing dates')
  }
  const thisWeek = _.find(weeks, ['startDate', oldValue.startDate])
  if (_.isNil(thisWeek)) {
    throw new errors.ConflictError('Work Period dates are not compatible with Resource Booking dates')
  }
  // recompute paymentStatus from the merged old+new state
  data.paymentStatus = helper.calculateWorkPeriodPaymentStatus(_.assign({}, oldValue, data))
  if (!currentUser.isMachine) {
    data.updatedBy = await helper.getUserId(currentUser.userId)
  }
  const key = `resourceBooking.id:${workPeriod.resourceBookingId}`
  let entity
  try {
    await sequelize.transaction(async (t) => {
      const updated = await workPeriod.update(data, { transaction: t })
      entity = updated.toJSON()
      // payments are not part of the update payload/event
      entity = _.omit(entity, ['payments'])
      await processUpdate({ ...entity, key })
    })
  } catch (e) {
    if (entity) {
      // DB update succeeded but ES processing failed — signal for reconciliation
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiod.update')
    }
    throw e
  }
  const oldValueWithoutPayments = _.omit(oldValue, ['payments'])
  await helper.postEvent(config.TAAS_WORK_PERIOD_UPDATE_TOPIC, entity, { oldValue: oldValueWithoutPayments, key })
  return entity
}
/**
 * Partially update a work period by id.
 * Thin wrapper that delegates to updateWorkPeriod; the schema below limits
 * which fields a PATCH may touch.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the workPeriod id
 * @param {Object} data the data to be updated
 * @returns {Object} the updated workPeriod
 */
async function partiallyUpdateWorkPeriod (currentUser, id, data) {
  const updated = await updateWorkPeriod(currentUser, id, data)
  return updated
}
partiallyUpdateWorkPeriod.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: Joi.object()
    .keys({
      daysWorked: Joi.number().integer().min(0).max(10),
      sentSurvey: Joi.boolean(),
      sentSurveyError: Joi.object().keys({
        errorCode: Joi.number().integer().min(0),
        errorMessage: Joi.string()
      })
    })
    .min(1)
    .required()
}).required()
/**
 * Delete workPeriod by id.
 * Refuses to delete when any associated payment is in an active status;
 * otherwise removes the payments and the work period in one transaction
 * and updates the ES projection.
 * @param {String} id the workPeriod id
 * @throws {errors.BadRequestError} when an active payment exists
 */
async function deleteWorkPeriod (id) {
  const workPeriod = await WorkPeriod.findById(id, { withPayments: true })
  // block deletion while any payment is still in an active status
  if (_.some(workPeriod.payments, payment => constants.ActiveWorkPeriodPaymentStatuses.indexOf(payment.status) !== -1)) {
    throw new errors.BadRequestError(`Can't delete WorkPeriod as it has associated WorkPeriodsPayment with one of statuses ${constants.ActiveWorkPeriodPaymentStatuses.join(', ')}`)
  }
  const key = `resourceBooking.id:${workPeriod.resourceBookingId}`
  try {
    await sequelize.transaction(async (t) => {
      // cascade-delete the (inactive) payments first
      await models.WorkPeriodPayment.destroy({
        where: {
          workPeriodId: id
        },
        transaction: t
      })
      await workPeriod.destroy({ transaction: t })
      await processDelete({ id, key })
    })
  } catch (e) {
    // signal failure so the ES index can be reconciled
    helper.postErrorEvent(config.TAAS_ERROR_TOPIC, { id }, 'workperiod.delete')
    throw e
  }
  await helper.postEvent(config.TAAS_WORK_PERIOD_DELETE_TOPIC, { id }, { key })
}
deleteWorkPeriod.schema = Joi.object().keys({
  id: Joi.string().uuid().required()
}).required()
/**
 * List workPeriods.
 * Queries Elasticsearch first (work periods live nested inside
 * ResourceBooking documents, so filtering/sorting/paging is re-applied
 * in memory after the fetch); on any ES failure it falls back to a
 * direct database query.
 * @param {Object} currentUser the user who perform this operation.
 * @param {Object} criteria the search criteria
 * @param {Object} options the extra options to control the function
 * @returns {Object} the search result, contain total/page/perPage and result array
 */
async function searchWorkPeriods (currentUser, criteria, options = { returnAll: false }) {
  // check user permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine && !currentUser.isConnectManager && !options.returnAll) {
    if (!criteria.projectId) { // regular user can only search with filtering by "projectId"
      throw new errors.ForbiddenError('Not allowed without filtering by "projectId"')
    }
    await helper.checkIsMemberOfProject(currentUser.userId, criteria.projectId)
  }
  // field visibility depends on the caller's permissions
  const queryOpt = _getWorkPeriodFilteringFields(currentUser)
  // `criteria.resourceBookingIds` could be array of ids, or comma separated string of ids
  // in case it's comma separated string of ids we have to convert it to an array of ids
  if ((typeof criteria.resourceBookingIds) === 'string') {
    criteria.resourceBookingIds = criteria.resourceBookingIds.trim().split(',').map(resourceBookingIdRaw => {
      const resourceBookingId = resourceBookingIdRaw.trim()
      if (!uuid.validate(resourceBookingId)) {
        throw new errors.BadRequestError(`resourceBookingId ${resourceBookingId} is not a valid uuid`)
      }
      return resourceBookingId
    })
  }
  // `criteria.paymentStatus` could be array of paymentStatus, or comma separated string of paymentStatus
  // in case it's comma separated string of paymentStatus we have to convert it to an array of paymentStatus
  if ((typeof criteria.paymentStatus) === 'string') {
    criteria.paymentStatus = criteria.paymentStatus.trim().split(',').map(ps => Joi.attempt({ paymentStatus: ps.trim() }, Joi.object().keys({ paymentStatus: Joi.paymentStatus() })).paymentStatus)
  }
  const page = criteria.page
  const perPage = criteria.perPage
  // default sort: id descending
  if (!criteria.sortBy) {
    criteria.sortBy = 'id'
  }
  if (!criteria.sortOrder) {
    criteria.sortOrder = 'desc'
  }
  try {
    const esQuery = {
      index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'),
      _source_includes: 'workPeriods',
      _source_excludes: queryOpt.excludeES,
      body: {
        query: {
          nested: {
            path: 'workPeriods',
            query: { bool: { must: [] } }
          }
        },
        size: 10000
        // We use a very large number for size, because we can't paginate nested documents
        // and in practice there could hardly be so many records to be returned.(also consider we are using filters in the meantime)
        // the number is limited by `index.max_result_window`, its default value is 10000, see
        // https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules.html#index-max-result-window
      }
    }
    // change the date format to match with database model
    if (criteria.startDate) {
      criteria.startDate = moment(criteria.startDate).format('YYYY-MM-DD')
    }
    if (criteria.endDate) {
      criteria.endDate = moment(criteria.endDate).format('YYYY-MM-DD')
    }
    // Apply filters
    _.each(_.pick(criteria, ['resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate']), (value, key) => {
      esQuery.body.query.nested.query.bool.must.push({
        term: {
          [`workPeriods.${key}`]: {
            value
          }
        }
      })
    })
    if (criteria.paymentStatus) {
      esQuery.body.query.nested.query.bool.must.push({
        terms: {
          'workPeriods.paymentStatus': criteria.paymentStatus
        }
      })
    }
    // if criteria contains resourceBookingIds, filter resourceBookingId with this value
    if (criteria.resourceBookingIds) {
      esQuery.body.query.nested.query.bool.filter = [{
        terms: {
          'workPeriods.resourceBookingId': criteria.resourceBookingIds
        }
      }]
    }
    logger.debug({ component: 'WorkPeriodService', context: 'searchWorkPeriods', message: `Query: ${JSON.stringify(esQuery)}` })
    const { body } = await esClient.search(esQuery)
    // flatten: each hit is a ResourceBooking carrying its nested work periods
    let workPeriods = _.reduce(body.hits.hits, (acc, resourceBooking) => _.concat(acc, resourceBooking._source.workPeriods), [])
    // ESClient will return ResourceBookings with it's all nested WorkPeriods
    // We re-apply WorkPeriod filters
    _.each(_.pick(criteria, ['startDate', 'endDate']), (value, key) => {
      workPeriods = _.filter(workPeriods, { [key]: value })
    })
    if (criteria.paymentStatus) {
      workPeriods = _.filter(workPeriods, wp => _.includes(criteria.paymentStatus, wp.paymentStatus))
    }
    // sort and paginate in memory (nested documents can't be paginated in ES)
    workPeriods = _.sortBy(workPeriods, [criteria.sortBy])
    if (criteria.sortOrder === 'desc') {
      workPeriods = _.reverse(workPeriods)
    }
    const total = workPeriods.length
    if (!options.returnAll) {
      workPeriods = _.slice(workPeriods, (page - 1) * perPage, page * perPage)
    }
    return {
      total,
      page,
      perPage,
      result: workPeriods
    }
  } catch (err) {
    // any ES failure is logged and we fall through to the DB query below
    logger.logFullError(err, { component: 'WorkPeriodService', context: 'searchWorkPeriods' })
  }
  logger.info({ component: 'WorkPeriodService', context: 'searchWorkPeriods', message: 'fallback to DB query' })
  const filter = { [Op.and]: [] }
  _.each(_.pick(criteria, ['resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate', 'paymentStatus']), (value, key) => {
    filter[Op.and].push({ [key]: value })
  })
  if (criteria.resourceBookingIds) {
    filter[Op.and].push({ resourceBookingId: criteria.resourceBookingIds })
  }
  const queryCriteria = {
    where: filter,
    offset: ((page - 1) * perPage),
    limit: perPage,
    order: [[criteria.sortBy, criteria.sortOrder]]
  }
  // add excluded fields criteria
  if (queryOpt.excludeDB.length > 0) {
    queryCriteria.attributes = { exclude: queryOpt.excludeDB }
  }
  // include WorkPeriodPayment model
  if (queryOpt.withPayments) {
    queryCriteria.include = [{
      model: models.WorkPeriodPayment,
      as: 'payments',
      required: false
    }]
  }
  const workPeriods = await WorkPeriod.findAll(queryCriteria)
  const total = await WorkPeriod.count({ where: filter })
  return {
    fromDb: true,
    total,
    page,
    perPage,
    result: workPeriods
  }
}
// Joi validation schema for searchWorkPeriods(currentUser, criteria, options).
searchWorkPeriods.schema = Joi.object().keys({
  // the authenticated user performing the search
  currentUser: Joi.object().required(),
  criteria: Joi.object().keys({
    // pagination controls
    page: Joi.number().integer().min(1).default(1),
    perPage: Joi.number().integer().min(1).max(10000).default(20),
    // sorting — NOTE(review): no defaults declared here, yet the search code reads
    // criteria.sortBy/sortOrder unconditionally; presumably defaults are applied
    // earlier inside searchWorkPeriods — confirm
    sortBy: Joi.string().valid('id', 'resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate', 'daysWorked', 'customerRate', 'memberRate', 'paymentStatus'),
    sortOrder: Joi.string().valid('desc', 'asc'),
    // a single payment status or a list of statuses
    // (Joi.paymentStatus() is a project-defined Joi extension)
    paymentStatus: Joi.alternatives(
      Joi.string(),
      Joi.array().items(Joi.paymentStatus())
    ),
    // exact-match date filters (the search code filters by equality, not by range)
    startDate: Joi.date().format('YYYY-MM-DD'),
    endDate: Joi.date().format('YYYY-MM-DD'),
    userHandle: Joi.string(),
    projectId: Joi.number().integer(),
    // filter by a single resource booking
    resourceBookingId: Joi.string().uuid(),
    sentSurvey: Joi.boolean(),
    sentSurveyError: Joi.object().keys({
      errorCode: Joi.number().integer().min(0),
      errorMessage: Joi.string()
    }),
    // a single id or a list of resource booking ids
    resourceBookingIds: Joi.alternatives(
      Joi.string(),
      Joi.array().items(Joi.string().uuid())
    )
  }).required(),
  // extra behavior flags (e.g. returnAll) — free-form object
  options: Joi.object()
}).required()
// Public interface of the WorkPeriod service (CRUD + search operations).
module.exports = {
  getWorkPeriod,
  createWorkPeriod,
  partiallyUpdateWorkPeriod,
  deleteWorkPeriod,
  searchWorkPeriods
}
<file_sep>/*
* Constants for the RCRM sync script.
*/
module.exports = {
ProcessingStatus: {
Successful: 'successful',
Failed: 'failed',
Skipped: 'skipped'
},
fieldNameMap: {
'Allow Apply': 'allowApply',
Slug: 'externalId'
}
}
<file_sep>/*
 * Static skill-name lookup data used by the skill-sync scripts.
 * NOTE(review): the exact taxonomy the left-hand names come from (they look
 * like an external skills taxonomy) is not visible from this file — confirm
 * against the consuming script before relying on this description.
 */
module.exports = {
  // external skill name -> Topcoder skill name it was matched to
  matchedSkills: {
    Dropwizard: 'Dropwizard',
    Nginx: 'NGINX',
    'Machine Learning': 'Machine Learning',
    'Force.Com': 'Force.Com Sites',
    'User Interface': 'UI Prototype',
    Docker: 'Docker',
    Appcelerator: 'appcelerator',
    Flux: 'Flux',
    'Bootstrap (FRONT-END FRAMEWORK)': 'Twitter Bootstrap',
    Financialforce: 'FinancialForce',
    Redis: 'Redis',
    Hybris: 'Hybris',
    Splunk: 'Splunk',
    'Lua (SCRIPTING LANGUAGE)': 'Lua',
    'Jface (UI TOOLKIT)': 'Jface',
    Recursion: 'Recursion',
    Blackberry: 'Blackberry SDK',
    Xul: 'XUL',
    Mapreduce: 'MapReduce',
    Nosql: 'NoSQL',
    Linux: 'Linux',
    Elasticsearch: 'Elasticsearch',
    'Microsoft Silverlight': 'Microsoft Silverlight',
    Vertica: 'Vertica',
    'Windows Servers': 'Windows Server',
    'Haskell (PROGRAMMING LANGUAGE)': 'Haskell',
    Hyperledger: 'Hyperledger',
    'Apache Cordova': 'Apache Cordova',
    'Play Framework': 'Play Framework',
    Zipkin: 'Zipkin',
    Marklogic: 'MarkLogic',
    Mysql: 'MySql',
    Visualforce: 'Visualforce',
    'Data Architecture': 'IBM Rational Data Architect',
    'Windows Communication Foundation': 'Windows Communication Foundation',
    'Jboss Seam': 'JBoss Seam',
    'Java Stored Procedure (SQL)': 'Transact-SQL',
    'Component Object Model (COM)': 'COM',
    'Ubuntu (OPERATING SYSTEM)': 'ubuntu',
    'Cobol (PROGRAMMING LANGUAGE)': 'Cobol',
    'Continuous Integration': 'Continuous Integration',
    'Extensible Messaging And Presence Protocol (XMPP)': 'XMPP',
    Microservices: 'Microservices',
    'Java Platform Micro Edition (J2ME)': 'J2ME',
    'Qt (SOFTWARE)': 'Qt',
    'R (PROGRAMMING LANGUAGE)': 'R',
    'Scala (PROGRAMMING LANGUAGE)': 'Scala',
    'Dynamic Programming': 'Dynamic Programming',
    'C (PROGRAMMING LANGUAGE)': 'C#',
    Typescript: 'TypeScript',
    Xamarin: 'Xamarin',
    'Sql Server Integration Services (SSIS)': 'SSIS',
    Kubernetes: 'Kubernetes',
    Inkscape: 'Inkscape',
    'Ibm Websphere Portal': 'IBM WebSphere Portal',
    Matlab: 'Matlab',
    Jekyll: 'Jekyll',
    Cassandra: 'Cassandra',
    'Airplay Sdk (APPLE)': 'Apple HIG',
    Jquery: 'jQuery Mobile',
    'Power Bi': 'Power BI',
    Json: 'JSON',
    'Django (WEB FRAMEWORK)': 'Django',
    'Meteor.Js': 'Meteor.js',
    Clojure: 'Clojure',
    'App Store (IOS)': 'iOS',
    'Amazon Alexa': 'Amazon Alexa',
    'Ibm Bluemix': 'IBM Bluemix',
    'Extensible Stylesheet Language (XSL)': 'XSL',
    'React.Js': 'React.js',
    Gradle: 'Gradle',
    Protractor: 'Protractor',
    'Java Platform Enterprise Edition (J2EE)': 'J2EE',
    Drupal: 'Drupal',
    'Php (SCRIPTING LANGUAGE)': 'PHP',
    'Customer Experience': 'Customer Experience (Cx)',
    Mariadb: 'MariaDB',
    Grommet: 'Grommet',
    Clickonce: 'ClickOnce',
    'Application Programming Interface (API)': 'API',
    'Unit Testing': 'Unit-Testing',
    'Ionic Framework': 'Ionic Framework',
    Moodle: 'moodle',
    Jbehave: 'JBehave',
    Gremlin: 'Gremlin',
    Office365: 'Office365',
    'Fortran (PROGRAMMING LANGUAGE)': 'Fortran',
    'Vue.Js': 'Vuejs',
    'Google Maps': 'Google-Maps',
    'Cloud Foundry': 'Cloud Foundry',
    'Robot Framework': 'Robot Framework',
    Ethereum: 'Ethereum',
    Neo4J: 'Neo4J',
    'Microsoft Dynamics': 'Microsoft Dynamics',
    'Geospatial Information Technology (GIT)': 'Git',
    Predix: 'Predix',
    Gitlab: 'Gitlab',
    'Windows Workflow Foundation': 'Windows Workflow Foundation',
    'Javascript (PROGRAMMING LANGUAGE)': 'JavaScript',
    'Backbone.Js': 'Backbone.js',
    Jabber: 'Jabber',
    Wordpress: 'Wordpress',
    Devops: 'DevOps',
    'Apache Derby': 'Apache Derby',
    'Rexx (PROGRAMMING LANGUAGE)': 'IBM REXX',
    'Web Scraping': 'Web scraping',
    Sorting: 'Sorting',
    'Message Broker': 'IBM Websphere Message Broker',
    Openam: 'Openam',
    Less: 'Less',
    'Equinox (OSGI)': 'OSGi',
    'Zend Framework': 'zend framework',
    'Sketch (DESIGN SOFTWARE)': 'Sketch',
    Coffeescript: 'Coffeescript',
    'Gnu Image Manipulation Program (GIMP)': 'gimp',
    'Node.Js': 'Node.js',
    Laravel: 'laravel',
    'Ruby (PROGRAMMING LANGUAGE)': 'Ruby',
    Mongodb: 'MongoDB',
    'Graphic Design': 'Graphic Design',
    'Entity Framework': 'Entity-Framework',
    'Hibernate (JAVA)': 'Hibernate',
    'Data Visualization': 'Data Visualization',
    'Windows Phone': 'Windows Phone',
    'Bash (SCRIPTING LANGUAGE)': 'Bash',
    'Akka (TOOLKIT)': 'Akka',
    'Sencha Touch': 'Sencha Touch 2',
    Multithreading: 'Multithreading',
    Apigee: 'Apigee',
    'Iso/Iec 14882 (C++)': 'C++',
    'Ab Initio (SOFTWARE)': 'Ab Initio',
    'Python (PROGRAMMING LANGUAGE)': 'Python',
    'Big Data': 'Big data',
    Vscode: 'VSCode',
    Codeigniter: 'Codeigniter',
    'Grunt.Js': 'Grunt.js',
    'Swing (DANCE)': 'Swing',
    'Groovy (PROGRAMMING LANGUAGE)': 'Groovy',
    Openshift: 'OpenShift',
    Integration: 'IBM Integration Bus',
    Compression: 'Compression',
    'Salesforce.Com': 'Salesforce.com',
    'Ibm Websphere Mq': 'IBM WebSphere MQ',
    'Information Architecture': 'Information Architecture (IA)',
    'Ember.Js': 'Ember.js',
    'Vim (TEXT EDITOR)': 'vim',
    Html5: 'HTML5',
    'Custom Tag': 'Custom Tag',
    'Asp.Net': 'ASP.NET',
    'Responsive Web Design': 'Responsive Web Design',
    'Ibm Rational Software': 'IBM Rational Software Architect',
    // NOTE(review): '<NAME>' looks like a redaction/placeholder artifact — confirm the real value
    Corda: '<NAME>',
    Phonegap: 'Phonegap',
    Junit: 'Junit',
    'Graph Theory': 'Graph Theory',
    'Eclipse (SOFTWARE)': 'Eclipse',
    Bigquery: 'BigQuery',
    Requirejs: 'Require.js',
    Flash: 'Flash',
    Github: 'Github',
    'Cascading Style Sheets (CSS)': 'CSS',
    'Web Services': 'Web Services',
    Phantomjs: 'Phantomjs',
    Heroku: 'Heroku',
    Geometry: 'Geometry',
    'Java Message Service (JMS)': 'JMS',
    'Aws Lambda': 'AWS Lambda',
    Sass: 'SASS',
    'Artificial Intelligence': 'AI',
    Talend: 'Talend',
    Quorum: 'Quorum',
    Kotlin: 'Kotlin',
    'Google Cloud': 'Google Cloud',
    'Interaction Design': 'Interaction Design (Ixd)',
    Sqlite: 'Sqlite',
    Postgresql: 'PostgreSQL',
    'User Experience': 'User Experience (Ux)',
    Invision: 'InVision',
    'Vert.X': 'Vert.X',
    Oauth: 'Oauth',
    Smartsheet: 'Smartsheet',
    Actionscript: 'ActionScript',
    Drools: 'Drools',
    'Apache Kafka': 'Apache Kafka',
    'Perl (PROGRAMMING LANGUAGE)': 'Perl',
    Parsing: 'String Parsing',
    'Product Design': 'Product Design',
    Openstack: 'Openstack',
    'Android (OPERATING SYSTEM)': 'Android',
    'Google App Engines': 'Google App Engine',
    'Apache Camel': 'Apache Camel',
    'Java (PROGRAMMING LANGUAGE)': 'Java',
    'Application Servers': 'IBM Websphere Application Server',
    'Hypertext Markup Language (HTML)': 'HTML',
    'Sitemaps (XML)': 'XML',
    Clojurescript: 'ClojureScript',
    Blockchain: 'Blockchain',
    Cartodb: 'CartoDB',
    'Oracle Databases': 'Oracle Database',
    'Ibm Lotus Domino': 'IBM Lotus Domino',
    Indexeddb: 'IndexedDB',
    'Data Science': 'Data Science',
    'Ajax (PROGRAMMING LANGUAGE)': 'Ajax',
    Twilio: 'Twilio',
    Selenium: 'Selenium',
    Trello: 'trello',
    Appium: 'Appium',
    Jruby: 'Jruby',
    'Ibm Db2': 'IBM DB2',
    Branding: 'Branding',
    '3D Reconstruction': '3D Reconstruction',
    'Ibm Aix': 'IBM AiX',
    'Active Directory': 'Active Directory'
  },
  // Topcoder skill names for which no external equivalent was found
  unMatchedSkills: [
    'EJB',
    'Database',
    'Winforms',
    'Photoshop',
    '.NET',
    'Leaflet.js',
    'Databasedotcom',
    'Maven',
    'Gaming',
    'Go',
    'Mobile',
    'IBM WebSphere DataStage',
    'Azure',
    'Om',
    'Lightning',
    'File',
    'Security',
    'Tableau',
    'Ibatis/Mybatis',
    'Integrator',
    'HAML',
    'SFDC Apex',
    'Responsive Design',
    'Castor',
    'Npm',
    'ipfs',
    '.NET System.Addins',
    'TIBCO',
    'Boomi',
    'InDesign',
    'EC2',
    'Concept Design',
    'nodewebkit',
    'S3',
    'Mozilla',
    'sympfony',
    'Website Design',
    'Chatter',
    'Calabash',
    'Sinatra',
    'Algorithm',
    'OSx',
    'Open Source',
    'Frontend',
    'XAML',
    'VB',
    'Winforms Controls',
    'User Testing',
    'SFDC Lightening Components',
    'Forms',
    'Contentful',
    'bower',
    'Use Case Diagrams (TcUML)',
    'BizTalk',
    'Infographic',
    'Gulp',
    'Xcode',
    'Word/Rich Text',
    'Spring',
    'RMI',
    'OmniGraffle',
    'Linq',
    'Swift',
    'MESH01',
    'MSMQ',
    'yii',
    'IBM Rational Application Developer',
    'Illustrator',
    'QlikView',
    'MIDP 2.0',
    'Beanstalk',
    'JPA',
    'SWT',
    'Simulation',
    'Brute Force',
    'IBM Pl/1',
    'Cumulocity',
    'Windows',
    'IBM Cognitive',
    'Validation',
    'IDOL OnDemand',
    'Wpf',
    'Hadoop',
    'Search',
    'Actian Database',
    'Simple Math',
    'Box',
    'CSS3',
    'LoadRunner',
    'Sharepoint 3.0',
    'IBM COGNOS',
    'Dc.js',
    'Pl/Sql',
    'Cisco',
    'Web methods',
    'Aris',
    'Remoting',
    'Apex',
    'VB.NET',
    'PowerShell',
    'Q & Bluebird',
    'Microsoft Exchange',
    'Swagger',
    'Regex',
    'UML',
    'JSF',
    'WCF',
    'Zepto.js',
    'Flight.js',
    'Apache Flume',
    'IBM Cloud Private',
    'Activity Diagrams (Tcuml)',
    'Servlet',
    'Cocoa',
    'Greedy',
    'IBM Rational Team Concert',
    'DocuSign',
    'VBA',
    'AngularJS',
    'Mobile Design',
    'Actian Data',
    'doctrine',
    'JSP',
    'foundation',
    'Axure',
    'Knockout',
    'F#',
    'IBM Watson',
    'Excel',
    'Sockets',
    'Siebel',
    'QA',
    'UITableView',
    'Dynamodb',
    'Solidity',
    'Logo',
    'travis',
    'Visual-Studio',
    'Espruino',
    'REST',
    'Hashgraph',
    'tvOS',
    'atom',
    'Titanium',
    'Shell',
    'Tosca',
    'Ldap',
    'kraken.js',
    'Performance',
    'JDBC',
    'D3.JS',
    'Couchbase',
    'CloudFactor',
    'HTTP',
    'ADO.NET',
    'Dojo',
    'Applet',
    'Spark',
    'AWS',
    'Mainframe',
    'Facebook',
    'jetbrains',
    'Flex',
    'Ant',
    'SFDC Mobile',
    'HPE Haven OnDemand',
    'Oracle',
    'JavaBean',
    'Salesforce',
    'Struts',
    'Function',
    'Class',
    'IBM Lotus Notes',
    'SCSS',
    'Brivo Labs',
    'SAP',
    'Multichain',
    'List',
    'Express',
    'gulp',
    'JMeter',
    'Math',
    'Image',
    'Commerce Server 2009',
    'IBM Design',
    'Print',
    'Advanced Math',
    'SFDC REST APIs',
    'String Manipulation',
    'chrome',
    'String',
    'SFDC Design',
    'CA',
    'Oracle EBS',
    'Golang',
    'Simple Search',
    'Pega',
    'Cognitive',
    'redhat',
    'Marvel - Design'
  ]
}
<file_sep>require('dotenv').config()
module.exports = {
// the log level, default is 'debug'
LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
// the server port, default is 3000
PORT: process.env.PORT || 3000,
// the server api base path
BASE_PATH: process.env.BASE_PATH || '/api/v5',
// The authorization secret used during token verification.
AUTH_SECRET: process.env.AUTH_SECRET || 'mysecret',
// The valid issuer of tokens, a json array contains valid issuer.
VALID_ISSUERS: process.env.VALID_ISSUERS || '["https://api.topcoder-dev.com", "https://api.topcoder.com", "https://topcoder-dev.auth0.com/", "https://auth.topcoder-dev.com/"]',
// Auth0 URL, used to get TC M2M token
AUTH0_URL: process.env.AUTH0_URL,
// Auth0 audience, used to get TC M2M token
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
// Auth0 audience for U-Bahn
AUTH0_AUDIENCE_UBAHN: process.env.AUTH0_AUDIENCE_UBAHN,
// Auth0 token cache time, used to get TC M2M token
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
// Auth0 client id, used to get TC M2M token
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
// Auth0 client secret, used to get TC M2M token
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
// Proxy Auth0 URL, used to get TC M2M token
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
m2m: {
M2M_AUDIT_USER_ID: process.env.M2M_AUDIT_USER_ID || '00000000-0000-0000-0000-000000000000',
M2M_AUDIT_HANDLE: process.env.M2M_AUDIT_HANDLE || 'TopcoderService'
},
// the Topcoder v5 url
TC_API: process.env.TC_API || 'https://api.topcoder-dev.com/v5',
// the Topcoder Beta API url currently v5.1
TC_BETA_API: process.env.TC_BETA_API || 'https://api.topcoder-dev.com/v5.1',
// the organization id
ORG_ID: process.env.ORG_ID || '36ed815b-3da1-49f1-a043-aaed0a4e81ad',
// the referenced taxonomy id
TOPCODER_TAXONOMY_ID: process.env.TOPCODER_TAXONOMY_ID || '9cc0795a-6e12-4c84-9744-15858dba1861',
TOPCODER_USERS_API: process.env.TOPCODER_USERS_API || 'https://api.topcoder-dev.com/v3/users',
// the api to find topcoder members
TOPCODER_MEMBERS_API: process.env.TOPCODER_MEMBERS_API || 'https://api.topcoder-dev.com/v5/members',
// the v3 api to find topcoder members
TOPCODER_MEMBERS_API_V3: process.env.TOPCODER_MEMBERS_API_V3 || 'https://api.topcoder-dev.com/v3/members',
// rate limit of requests to user api
MAX_PARALLEL_REQUEST_TOPCODER_USERS_API: process.env.MAX_PARALLEL_REQUEST_TOPCODER_USERS_API || 100,
// PostgreSQL database url.
DATABASE_URL: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres',
// string - PostgreSQL database target schema
DB_SCHEMA_NAME: process.env.DB_SCHEMA_NAME || 'bookings',
// the project service url
PROJECT_API_URL: process.env.PROJECT_API_URL || 'https://api.topcoder-dev.com',
// the default path for importing and exporting data
DEFAULT_DATA_FILE_PATH: './data/demo-data.json',
esConfig: {
// the elasticsearch host
HOST: process.env.ES_HOST || 'http://localhost:9200',
ELASTICCLOUD: {
// The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
id: process.env.ELASTICCLOUD_ID,
// The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
username: process.env.ELASTICCLOUD_USERNAME,
// The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
password: <PASSWORD>
},
// The Amazon region to use when using AWS Elasticsearch service
AWS_REGION: process.env.AWS_REGION || 'us-east-1', // AWS Region to be used if we use AWS ES
// the job index
ES_INDEX_JOB: process.env.ES_INDEX_JOB || 'job',
// the job candidate index
ES_INDEX_JOB_CANDIDATE: process.env.ES_INDEX_JOB_CANDIDATE || 'job_candidate',
// the resource booking index
ES_INDEX_RESOURCE_BOOKING: process.env.ES_INDEX_RESOURCE_BOOKING || 'resource_booking',
// the role index
ES_INDEX_ROLE: process.env.ES_INDEX_ROLE || 'role',
// the max bulk size in MB for ES indexing
MAX_BULK_REQUEST_SIZE_MB: process.env.MAX_BULK_REQUEST_SIZE_MB || 20,
// the max number of documents per bulk for ES indexing
MAX_BULK_NUM_DOCUMENTS: process.env.MAX_BULK_NUM_DOCUMENTS || 100
},
// Topcoder Bus API URL
BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5',
// The error topic at which bus api will publish any errors
KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting',
// The originator value for the kafka messages
KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'taas-api',
// topics for error
TAAS_ERROR_TOPIC: process.env.TAAS_ERROR_TOPIC || 'taas.action.error',
// topics for job service
// the create job entity Kafka message topic
TAAS_JOB_CREATE_TOPIC: process.env.TAAS_JOB_CREATE_TOPIC || 'taas.job.create',
// the update job entity Kafka message topic
TAAS_JOB_UPDATE_TOPIC: process.env.TAAS_JOB_UPDATE_TOPIC || 'taas.job.update',
// the delete job entity Kafka message topic
TAAS_JOB_DELETE_TOPIC: process.env.TAAS_JOB_DELETE_TOPIC || 'taas.job.delete',
// topics for jobcandidate service
// the create job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_CREATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_CREATE_TOPIC || 'taas.jobcandidate.create',
// the update job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_UPDATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_UPDATE_TOPIC || 'taas.jobcandidate.update',
// the delete job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_DELETE_TOPIC: process.env.TAAS_JOB_CANDIDATE_DELETE_TOPIC || 'taas.jobcandidate.delete',
// topics for resource booking service
// the create resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_CREATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_CREATE_TOPIC || 'taas.resourcebooking.create',
// the update resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_UPDATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC || 'taas.resourcebooking.update',
// the delete resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_DELETE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_DELETE_TOPIC || 'taas.resourcebooking.delete',
// topics for work period service
// the create work period entity Kafka message topic
TAAS_WORK_PERIOD_CREATE_TOPIC: process.env.TAAS_WORK_PERIOD_CREATE_TOPIC || 'taas.workperiod.create',
// the update work period entity Kafka message topic
TAAS_WORK_PERIOD_UPDATE_TOPIC: process.env.TAAS_WORK_PERIOD_UPDATE_TOPIC || 'taas.workperiod.update',
// the delete work period entity Kafka message topic
TAAS_WORK_PERIOD_DELETE_TOPIC: process.env.TAAS_WORK_PERIOD_DELETE_TOPIC || 'taas.workperiod.delete',
// topics for work period payment service
// the create work period payment entity Kafka message topic
TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC: process.env.TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC || 'taas.workperiodpayment.create',
// the update work period payment entity Kafka message topic
TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC: process.env.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC || 'taas.workperiodpayment.update',
// the delete work period payment entity Kafka message topic
TAAS_WORK_PERIOD_PAYMENT_DELETE_TOPIC: process.env.TAAS_WORK_PERIOD_PAYMENT_DELETE_TOPIC || 'taas.workperiodpayment.delete',
// topics for interview service
// the request interview Kafka message topic
TAAS_INTERVIEW_REQUEST_TOPIC: process.env.TAAS_INTERVIEW_REQUEST_TOPIC || 'taas.interview.requested',
// the interview update Kafka message topic
TAAS_INTERVIEW_UPDATE_TOPIC: process.env.TAAS_INTERVIEW_UPDATE_TOPIC || 'taas.interview.update',
// the interview bulk update Kafka message topic
TAAS_INTERVIEW_BULK_UPDATE_TOPIC: process.env.TAAS_INTERVIEW_BULK_UPDATE_TOPIC || 'taas.interview.bulkUpdate',
// topics for role service
// the create role entity Kafka message topic
TAAS_ROLE_CREATE_TOPIC: process.env.TAAS_ROLE_CREATE_TOPIC || 'taas.role.requested',
// the update role entity Kafka message topic
TAAS_ROLE_UPDATE_TOPIC: process.env.TAAS_ROLE_UPDATE_TOPIC || 'taas.role.update',
// the delete role entity Kafka message topic
TAAS_ROLE_DELETE_TOPIC: process.env.TAAS_ROLE_DELETE_TOPIC || 'taas.role.delete',
// the create team entity message topic, only used for eventHandler
TAAS_TEAM_CREATE_TOPIC: process.env.TAAS_TEAM_CREATE_TOPIC || 'taas.team.create',
// special kafka topics
TAAS_ACTION_RETRY_TOPIC: process.env.TAAS_ACTION_RETRY_TOPIC || 'taas.action.retry',
// the Kafka message topic for sending email
EMAIL_TOPIC: process.env.EMAIL_TOPIC || 'external.action.email',
// the Kafka message topic for creating notifications
NOTIFICATIONS_CREATE_TOPIC: process.env.NOTIFICATIONS_CREATE_TOPIC || 'notifications.action.create',
// the emails address for receiving the issue report
// REPORT_ISSUE_EMAILS may contain comma-separated list of email which is converted to array
REPORT_ISSUE_EMAILS: (process.env.REPORT_ISSUE_EMAILS || '').split(','),
// the emails address for receiving the issue report
// REPORT_ISSUE_EMAILS may contain comma-separated list of email which is converted to array
REQUEST_EXTENSION_EMAILS: (process.env.REQUEST_EXTENSION_EMAILS || '').split(','),
// the emails address for interview invitation
// INTERVIEW_INVITATION_CC_LIST may contain comma-separated list of email which is converted to array
INTERVIEW_INVITATION_CC_LIST: (process.env.INTERVIEW_INVITATION_CC_LIST || '').split(','),
// INTERVIEW_INVITATION_RECIPIENTS_LIST may contain comma-separated list of email which is converted to array
// <EMAIL> should be in the RECIPIENTS list
INTERVIEW_INVITATION_RECIPIENTS_LIST: (process.env.INTERVIEW_INVITATION_RECIPIENTS_LIST || '<EMAIL>').split(','),
// the emails address for overlapping interview
NOTIFICATION_OPS_EMAILS: (process.env.NOTIFICATION_OPS_EMAILS || '<EMAIL>').split(','),
// the slack channel for sending notifications
NOTIFICATION_SLACK_CHANNEL: process.env.NOTIFICATION_SLACK_CHANNEL || '#tass-notification',
// SendGrid email template ID for reporting issue
REPORT_ISSUE_SENDGRID_TEMPLATE_ID: process.env.REPORT_ISSUE_SENDGRID_TEMPLATE_ID,
// SendGrid email template ID for requesting extension
REQUEST_EXTENSION_SENDGRID_TEMPLATE_ID: process.env.REQUEST_EXTENSION_SENDGRID_TEMPLATE_ID,
// SendGrid email template ID for interview invitation
INTERVIEW_INVITATION_SENDGRID_TEMPLATE_ID: process.env.INTERVIEW_INVITATION_SENDGRID_TEMPLATE_ID,
// The sender (aka `from`) email for invitation.
INTERVIEW_INVITATION_SENDER_EMAIL: process.env.INTERVIEW_INVITATION_SENDER_EMAIL || '<EMAIL>',
// the URL where TaaS App is hosted
TAAS_APP_URL: process.env.TAAS_APP_URL || 'https://platform.topcoder-dev.com/taas/myteams',
// environment variables for Payment Service
ROLE_ID_SUBMITTER: process.env.ROLE_ID_SUBMITTER || '732339e7-8e30-49d7-9198-cccf9451e221',
TYPE_ID_TASK: process.env.TYPE_ID_TASK || 'ecd58c69-238f-43a4-a4bb-d172719b9f31',
DEFAULT_TIMELINE_TEMPLATE_ID: process.env.DEFAULT_TIMELINE_TEMPLATE_ID || '53a307ce-b4b3-4d6f-b9a1-3741a58f77e6',
DEFAULT_TRACK_ID: process.env.DEFAULT_TRACK_ID || '9b6fc876-f4d9-4ccb-9dfd-419247628825',
// the minimum matching rate when searching roles by skills
ROLE_MATCHING_RATE: process.env.ROLE_MATCHING_RATE || 0.66,
// member groups representing Wipro or TopCoder employee
INTERNAL_MEMBER_GROUPS: process.env.INTERNAL_MEMBER_GROUPS || ['20000000', '20000001', '20000003', '20000010', '20000015'],
// Topcoder skills cache time in minutes
TOPCODER_SKILLS_CACHE_TIME: process.env.TOPCODER_SKILLS_CACHE_TIME || 60,
// weekly survey scheduler config
WEEKLY_SURVEY: {
SWITCH: process.env.WEEKLY_SURVEY_SWITCH || 'OFF',
CRON: process.env.WEEKLY_SURVEY_CRON || '0 1 * * 7',
BASE_URL: process.env.WEEKLY_SURVEY_BASE_URL || 'https://api.surveymonkey.net/v3/surveys',
JWT_TOKEN: process.env.WEEKLY_SURVEY_JWT_TOKEN || '',
SURVEY_ID: process.env.WEEKLY_SURVEY_SURVEY_ID || '',
SURVEY_COLLECTOR_PREFIX: process.env.WEEKLY_SURVEY_SURVEY_COLLECTOR_PREFIX || 'Week ending',
SURVEY_MASTER_COLLECTOR_ID: process.env.WEEKLY_SURVEY_SURVEY_MASTER_COLLECTOR_ID || '',
SURVEY_MASTER_MESSAGE_ID: process.env.WEEKLY_SURVEY_SURVEY_MASTER_MESSAGE_ID || '',
SURVEY_CONTACT_GROUP_ID: process.env.WEEKLY_SURVEY_SURVEY_CONTACT_GROUP_ID || ''
},
// payment scheduler config
PAYMENT_PROCESSING: {
// switch off actual API calls in Payment Scheduler
SWITCH: process.env.PAYMENT_PROCESSING_SWITCH || 'OFF',
// the payment scheduler cron config
CRON: process.env.PAYMENT_PROCESSING_CRON || '0 */5 * * * *',
// the number of records processed by one time
BATCH_SIZE: parseInt(process.env.PAYMENT_PROCESSING_BATCH_SIZE || 50),
// in-progress expired to determine whether a record has been processed abnormally, moment duration format
IN_PROGRESS_EXPIRED: process.env.IN_PROGRESS_EXPIRED || 'PT1H',
// the number of max retry config
MAX_RETRY_COUNT: parseInt(process.env.PAYMENT_PROCESSING_MAX_RETRY_COUNT || 10),
// the time of retry base delay, unit: ms
RETRY_BASE_DELAY: parseInt(process.env.PAYMENT_PROCESSING_RETRY_BASE_DELAY || 100),
// the time of retry max delay, unit: ms
RETRY_MAX_DELAY: parseInt(process.env.PAYMENT_PROCESSING_RETRY_MAX_DELAY || 10000),
// the max time of one request, unit: ms
PER_REQUEST_MAX_TIME: parseInt(process.env.PAYMENT_PROCESSING_PER_REQUEST_MAX_TIME || 30000),
// the max time of one payment record, unit: ms
PER_PAYMENT_MAX_TIME: parseInt(process.env.PAYMENT_PROCESSING_PER_PAYMENT_MAX_TIME || 60000),
// the max records of payment of a minute
PER_MINUTE_PAYMENT_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_PAYMENT_MAX_COUNT || 12),
// the max requests of challenge of a minute
PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT || 60),
// the max requests of resource of a minute
PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT || 20),
// the default step fix delay, unit: ms
FIX_DELAY_STEP: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
// the fix delay after step of create challenge, unit: ms
FIX_DELAY_STEP_CREATE_CHALLENGE: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_CREATE_CHALLENGE || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
// the fix delay after step of assign member, unit: ms
FIX_DELAY_STEP_ASSIGN_MEMBER: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_ASSIGN_MEMBER || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
// the fix delay after step of activate challenge, unit: ms
FIX_DELAY_STEP_ACTIVATE_CHALLENGE: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_ACTIVATE_CHALLENGE || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500)
},
// if a job reach this critier, system will automatically withdrawn other job applications.
JOBS_HOUR_PER_WEEK: 20,
// the mapping includes the status transformation when auto-withdrawn feature is performed on job candidates.
WITHDRAWN_STATUS_CHANGE_MAPPING: {
applied: 'withdrawn-prescreen',
'skills-test': 'withdrawn-prescreen',
'phone-screen': 'withdrawn-prescreen',
open: 'withdrawn',
interview: 'withdrawn',
selected: 'withdrawn',
offered: 'withdrawn'
},
// the sender email
NOTIFICATION_SENDER_EMAIL: process.env.NOTIFICATION_SENDER_EMAIL || '<EMAIL>',
// the email notification sendgrid template id
NOTIFICATION_SENDGRID_TEMPLATE_ID: process.env.NOTIFICATION_SENDGRID_TEMPLATE_ID,
// frequency of cron checking for available candidates for review
CRON_CANDIDATE_REVIEW: process.env.CRON_CANDIDATE_REVIEW || '00 00 13 * * 0-6',
// frequency of cron checking for coming up interviews
// when changing this to frequency other than 5 mins, please change the minutesRange in sendInterviewComingUpEmails correspondingly
CRON_INTERVIEW_COMING_UP: process.env.CRON_INTERVIEW_COMING_UP || '*/5 * * * *',
// frequency of cron checking for interview completed
// when changing this to frequency other than 5 mins, please change the minutesRange in sendInterviewCompletedEmails correspondingly
CRON_INTERVIEW_COMPLETED: process.env.CRON_INTERVIEW_COMPLETED || '*/5 * * * *',
// frequency of cron checking for post interview actions
CRON_POST_INTERVIEW: process.env.CRON_POST_INTERVIEW || '00 00 13 * * 0-6',
// frequency of cron checking for upcoming resource bookings
CRON_UPCOMING_RESOURCE_BOOKING: process.env.CRON_UPCOMING_RESOURCE_BOOKING || '00 00 13 * * 1',
// The match window for fetching interviews which are coming up
INTERVIEW_COMING_UP_MATCH_WINDOW: process.env.INTERVIEW_COMING_UP_MATCH_WINDOW || 'PT5M',
// The remind time for fetching interviews which are coming up
INTERVIEW_COMING_UP_REMIND_TIME: (process.env.INTERVIEW_COMING_UP_REMIND_TIME || 'PT1H,PT24H').split(','),
// The match window for fetching completed interviews
INTERVIEW_COMPLETED_MATCH_WINDOW: process.env.INTERVIEW_COMPLETED_MATCH_WINDOW || 'PT5M',
// The interview completed past time for fetching interviews
INTERVIEW_COMPLETED_PAST_TIME: process.env.INTERVIEW_COMPLETED_PAST_TIME || 'PT4H',
// The time before resource booking expiry when we should start sending notifications
RESOURCE_BOOKING_EXPIRY_TIME: process.env.RESOURCE_BOOKING_EXPIRY_TIME || 'P21D',
// The match window for fetching post interview actions
POST_INTERVIEW_ACTION_MATCH_WINDOW: process.env.POST_INTERVIEW_ACTION_MATCH_WINDOW || 'P1D',
// The Stripe
STRIPE_SECRET_KEY: process.env.STRIPE_SECRET_KEY,
CURRENCY: process.env.CURRENCY || 'usd',
// RCRM base URL
RCRM_APP_URL: process.env.RCRM_APP_URL || 'https://app.recruitcrm.io'
}
<file_sep># Topcoder TaaS API
## Tech Stack
- [Node.js](https://nodejs.org/) v12
- [PostgreSQL](https://www.postgresql.org/)
- [ElasticSearch](https://www.elastic.co/) v7.7
- [Apache Kafka](https://kafka.apache.org/)
## Local Setup
### Requirements
- [Node.js](https://nodejs.org/en/) v12+
- [Docker](https://www.docker.com/)
- [Docker-Compose](https://docs.docker.com/compose/install/)
### Steps to run locally
0. Make sure to use Node v12+ by command `node -v`. We recommend using [NVM](https://github.com/nvm-sh/nvm) to quickly switch to the right version:
```bash
nvm use
```
1. 📦 Install npm dependencies
```bash
npm install
```
2. ⚙ Local config
1. In the `taas-apis` root directory create `.env` file with the next environment variables. Values for **Auth0 config** should be shared with you on the forum.<br>
```bash
# Auth0 config
AUTH0_URL=
AUTH0_AUDIENCE=
AUTH0_AUDIENCE_UBAHN=
AUTH0_CLIENT_ID=
AUTH0_CLIENT_SECRET=
# necessary if you'll utilize email functionality of interviews
INTERVIEW_INVITATION_SENDGRID_TEMPLATE_ID=
INTERVIEW_INVITATION_SENDER_EMAIL=
# Locally deployed services (via docker-compose)
ES_HOST=http://dockerhost:9200
DATABASE_URL=postgres://postgres:postgres@dockerhost:5432/postgres
BUSAPI_URL=http://dockerhost:8002/v5
# stripe
STRIPE_SECRET_KEY=
CURRENCY=usd
```
- Values from this file would be automatically used by many `npm` commands.
- ⚠️ Never commit this file or its copy to the repository!
1. Set `dockerhost` to point to the IP address of Docker. The Docker IP address depends on your system. For example if docker is run on IP `127.0.0.1` add the next line to your `/etc/hosts` file:
```
127.0.0.1 dockerhost
```
Alternatively, you may update `.env` file and replace `dockerhost` with your docker IP address.
3. 🚢 Start docker-compose with services which are required to start Topcoder TaaS API locally
```bash
npm run services:up
```
Wait until all containers are fully started. As a good indicator, wait until `taas-es-processor` successfully started by viewing its logs:
```bash
npm run services:logs -- -f taas-es-processor
```
<details><summary>Click to see a good logs example</summary>
<br>
- first it would be waiting for `kafka-client` to create all the required topics and exit, you would see:
```
tc-taas-es-processor | Waiting for kafka-client to exit....
```
- after that, `taas-es-processor` would be started itself. Make sure it successfully connected to Kafka, you should see 9 lines with text `Subscribed to taas.`:
```
tc-taas-es-processor | [2021-04-09T21:20:19.035Z] app INFO : Starting kafka consumer
tc-taas-es-processor | 2021-04-09T21:20:21.292Z INFO no-kafka-client Joined group taas-es-processor generationId 1 as no-kafka-client-076538fc-60dd-4ca4-a2b9-520bdf73bc9e
tc-taas-es-processor | 2021-04-09T21:20:21.293Z INFO no-kafka-client Elected as group leader
tc-taas-es-processor | 2021-04-09T21:20:21.449Z DEBUG no-kafka-client Subscribed to taas.role.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.450Z DEBUG no-kafka-client Subscribed to taas.role.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.451Z DEBUG no-kafka-client Subscribed to taas.role.requested:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.452Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.create:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.455Z DEBUG no-kafka-client Subscribed to taas.job.create:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.456Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.457Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.458Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.459Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.create:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.461Z DEBUG no-kafka-client Subscribed to taas.job.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.463Z DEBUG no-kafka-client Subscribed to taas.workperiod.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.466Z DEBUG no-kafka-client Subscribed to taas.workperiod.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.468Z DEBUG no-kafka-client Subscribed to taas.workperiod.create:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.469Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.470Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.delete:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.471Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.create:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.472Z DEBUG no-kafka-client Subscribed to taas.action.retry:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.473Z DEBUG no-kafka-client Subscribed to taas.job.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | 2021-04-09T21:20:21.474Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.update:0 offset 0 leader kafka:9093
tc-taas-es-processor | [2021-04-09T21:20:21.475Z] app INFO : Initialized.......
tc-taas-es-processor | [2021-04-09T21:20:21.479Z] app INFO : common.error.reporting,taas.job.create,taas.job.update,taas.job.delete,taas.jobcandidate.create,taas.jobcandidate.update,taas.jobcandidate.delete,taas.resourcebooking.create,taas.resourcebooking.update,taas.resourcebooking.delete,taas.workperiod.create,taas.workperiod.update,taas.workperiod.delete,taas.workperiodpayment.create,taas.workperiodpayment.update,taas.interview.requested,taas.interview.update,taas.interview.bulkUpdate,taas.role.requested,taas.role.update,taas.role.delete,taas.action.retry
tc-taas-es-processor | [2021-04-09T21:20:21.480Z] app INFO : Kick Start.......
tc-taas-es-processor | ********** Topcoder Health Check DropIn listening on port 3001
tc-taas-es-processor | Topcoder Health Check DropIn started and ready to roll
```
</details>
<br>
If you want to learn more about docker-compose configuration
<details><summary>see more details here</summary>
<br>
This docker-compose file starts the next services:
| Service | Name | Port |
| --------------------------------------------------------------------------- | :---------------: | :---: |
| PostgreSQL | postgres | 5432 |
| Elasticsearch | elasticsearch | 9200 |
| Zookeeper | zookeeper | 2181 |
| Kafka | kafka | 9092 |
| [tc-bus-api](https://github.com/topcoder-platform/tc-bus-api) | tc-bus-api | 8002 |
| [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) | taas-es-processor | 5000 |
- as many of the Topcoder services in this docker-compose require Auth0 configuration for M2M calls, our docker-compose file passes environment variables `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET`, `AUTH0_URL`, `AUTH0_AUDIENCE`, `AUTH0_PROXY_SERVER_URL` to its containers. docker-compose takes them from `.env` file if provided.
- `docker-compose` automatically would create Kafka topics which are used by `taas-es-processor` listed in `local/kafka-client/topics.txt`.
- To view the logs from any container inside docker-compose use the following command, replacing `SERVICE_NAME` with the corresponding value under the **Name** column in the above table:
```bash
npm run services:log -- -f SERVICE_NAME
```
- If you want to modify the code of any of the services which are run inside this docker-compose file, you can stop such service inside docker-compose by command `docker-compose -f local/docker-compose.yml stop <SERVICE_NAME>` and run the service separately, following its README file.
</details>
_NOTE: In production these dependencies / services are hosted & managed outside Topcoder TaaS API._
4. ♻ Init DB, ES
```bash
npm run local:init
```
This command will do 3 things:
- create Database tables (drop if exists)
- create Elasticsearch indexes (drop if exists)
- import demo data to Database and index it to ElasticSearch (clears any existent data if exist)
5. 🚀 Start Topcoder TaaS API
```bash
npm run dev
```
Runs the Topcoder TaaS API using nodemon, so it would be restarted after any of the files is updated.
The Topcoder TaaS API will be served on `http://localhost:3000`.
### Working on `taas-es-processor` locally
When you run `taas-apis` locally as per "[Steps to run locally](#steps-to-run-locally)" the [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) would be run for you automatically together with other services inside the docker container via `npm run services:up`.
To be able to change and test `taas-es-processor` locally you can follow the next steps:
1. Stop `taas-es-processor` inside docker by running `docker-compose -f local/docker-compose.yml stop taas-es-processor`
2. Run `taas-es-processor` separately from the source code. As `npm run services:up` already run all the dependencies for both `taas-apis` and for `taas-es-processor`. The only thing you need to do for running `taas-es-processor` locally is clone the [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) repository and inside `taas-es-processor` folder run:
- `nvm use` - to use correct Node version
- `npm install`
- Create `.env` file with the next environment variables. Values for **Auth0 config** should be shared with you on the forum.<br>
```bash
# Auth0 config
AUTH0_URL=
AUTH0_AUDIENCE=
AUTH0_CLIENT_ID=
AUTH0_CLIENT_SECRET=
```
- Values from this file would be automatically used by many `npm` commands.
- ⚠️ Never commit this file or its copy to the repository!
- `npm run start`
## NPM Commands
| Command | Description |
| ------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- |
| `npm run lint` | Check for lint errors. |
| `npm run lint:fix` | Check for lint errors and fix errors automatically when possible. |
| `npm run build` | Build source code for production run into `dist` folder. |
| `npm run start` | Start app in the production mode from prebuilt `dist` folder. |
| `npm run dev` | Start app in the development mode using `nodemon`. |
| `npm run test` | Run tests. |
| `npm run init-db` | Initializes Database. |
| `npm run create-index` | Create Elasticsearch indexes. Use `-- --force` flag to skip confirmation |
| `npm run delete-index` | Delete Elasticsearch indexes. Use `-- --force` flag to skip confirmation |
| `npm run data:import <filePath>` | Imports data into ES and db from filePath (`./data/demo-data.json` is used as default). Use `-- --force` flag to skip confirmation |
| `npm run data:export <filePath>` | Exports data from ES and db into filePath (`./data/demo-data.json` is used as default). Use `-- --force` flag to skip confirmation |
| `npm run index:all` | Indexes all data from db into ES. Use `-- --force` flag to skip confirmation |
| `npm run index:jobs <jobId>` | Indexes job data from db into ES, if jobId is not given all data is indexed. Use `-- --force` flag to skip confirmation |
| `npm run index:job-candidates <jobCandidateId>` | Indexes job candidate data from db into ES, if jobCandidateId is not given all data is indexed. Use `-- --force` flag to skip confirmation |
| `npm run index:resource-bookings <resourceBookingsId>` | Indexes resource bookings data from db into ES, if resourceBookingsId is not given all data is indexed. Use `-- --force` flag to skip confirmation |
| `npm run index:roles <roleId>` | Indexes roles data from db into ES, if roleId is not given all data is indexed. Use `-- --force` flag to skip confirmation |
| `npm run services:up` | Start services via docker-compose for local development. |
| `npm run services:down` | Stop services via docker-compose for local development. |
| `npm run services:logs -- -f <service_name>` | View logs of some service inside docker-compose. |
| `npm run services:rebuild -- -f <service_name>` | Rebuild service container ignoring cache (useful when pushed something to the Git repository of service) |
| `npm run local:init` | Recreate Database and Elasticsearch indexes and populate demo data for local development (removes any existent data). |
| `npm run local:reset` | Recreate Database and Elasticsearch indexes (removes any existent data). |
| `npm run cov` | Code Coverage Report. |
| `npm run migrate` | Run any migration files which haven't run yet. |
| `npm run migrate:undo` | Revert most recent migration. |
| `npm run demo-payment-scheduler` | Create 1000 Work Periods Payment records with status "scheduled" and various "amount" |
| `npm run emsi-mapping` | mapping EMSI tags to topcoder skills |
## Import and Export data
### 📤 Export data
To export data to the default file `data/demo-data.json`, run:
```bash
npm run data:export
```
If you want to export data to another file, run:
```bash
npm run data:export -- --file path/to-file.json
```
- List of models that will be exported are defined in `scripts/data/exportData.js`.
### 📥 Import data
⚠️ This command would clear any existent data in DB and ES before importing.
*During importing, data would be first imported to the database, and after from the database it would be indexed to the Elasticsearch index.*
To import data from the default file `data/demo-data.json`, run:
```bash
npm run data:import
```
If you want to import data from another file, run:
```bash
npm run data:import -- --file path/to-file.json
```
- List of models that will be imported are defined in `scripts/data/importData.js`.
## Kafka commands
If you've used `docker-compose` with the file `local/docker-compose.yml` during local setup to spawn kafka & zookeeper, you can use the following commands to manipulate kafka topics and messages:
(Replace `TOPIC_NAME` with the name of the desired topic)
### Create Topic
```bash
docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME
```
### List Topics
```bash
docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181
```
### Watch Topic
```bash
docker exec tc-taas-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TOPIC_NAME
```
### Post Message to Topic (from stdin)
```bash
docker exec -it tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TOPIC_NAME
```
- Enter or copy/paste the message into the console after starting this command.
## DB Migration
- `npm run migrate`: run any migration files which haven't run yet.
- `npm run migrate:undo`: revert most recent migration.
Configuration for migration is at `./config/config.json`.
The following parameters can be set in the config file or via env variables:
- `username`: set via env `DB_USERNAME`; database username
- `password`: set via env `DB_PASSWORD`; database password
- `database`: set via env `DB_NAME`; database name
- `host`: set via env `DB_HOST`; database host name
## Testing
- Run `npm run test` to execute unit tests
- Run `npm run cov` to execute unit tests and generate coverage report.
## 📋 Code Guidelines
### General Requirements
- Split code into reusable methods where applicable.
- Lint should pass.
- Unit tests should pass.
### Documentation and Utils
When we add, update or delete models and/or endpoints we have to make sure that we keep documentation and utility scripts up to date.
- **Swagger**
- **Postman**
- **ES Mapping**
- Update mapping definitions for ElasticSearch indexes inside both repositories [taas-apis](https://github.com/topcoder-platform/taas-apis) and [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor).
- **Reindex**
- NPM command `index:all` should re-index data in all ES indexes.
- There should be an individual NPM command `index:*` which would re-index data only in one ES index.
- **Import/Export**
- NPM commands `data:import` and `data:export` should support importing/exporting data from/to all the models.
- **Create/Delete Index**
- NPM commands `create-index` and `delete-index` should support creating/deleting all the indexes.
- **DB Migration**
- If there are any updates in DB schemas, create a DB migration script inside `migrations` folder which would make any necessary updates to the DB schema.
- Test, that when we migrate DB from the previous state using `npm run migrate`, we get exactly the same DB schema as if we create DB from scratch using command `npm run init-db force`.
## EMSI mapping
mapping EMSI tags to topcoder skills
Run `npm run emsi-mapping` to create the mapping file
It will take about 15 minutes to create the mapping file `script/emsi-mapping/emsi-skils-mapping.js`
<file_sep>const { Sequelize, Model } = require('sequelize')
const config = require('config')
const _ = require('lodash')
const errors = require('../common/errors')
module.exports = (sequelize) => {
  class ResourceBooking extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      // cache the models registry so findById can build nested includes later
      ResourceBooking._models = models
      ResourceBooking.belongsTo(models.Job, { foreignKey: 'jobId' })
      ResourceBooking.hasMany(models.WorkPeriod, { as: 'workPeriods', foreignKey: 'resourceBookingId' })
    }
    /**
     * Get resource booking by id
     * @param {String} id the resource booking id
     * @param {Object} [options] optional selection criteria:
     *   - fieldsRB/excludeRB: ResourceBooking attributes to select/exclude
     *   - withWorkPeriods: when truthy, include the associated WorkPeriods
     *   - allWorkPeriods/fieldsWP/excludeWP: WorkPeriod attribute selection;
     *     entries are dotted (e.g. 'workPeriods.<attr>') and the prefix is stripped
     *   - withWorkPeriodPayments: when truthy, include WorkPeriodPayments under each WorkPeriod
     *   - allWorkPeriodPayments/fieldsWPP/excludeWPP: WorkPeriodPayment attribute selection;
     *     entries are dotted (e.g. 'workPeriods.payments.<attr>')
     * @returns {ResourceBooking} the resource booking instance
     * @throws {NotFoundError} when no resource booking exists with the given id
     */
    static async findById (id, options) {
      const criteria = {
        where: {
          id
        }
      }
      if (!_.isUndefined(options)) {
        // Select ResourceBooking fields
        // NOTE(review): the guard checks options.include but applies options.fieldsRB —
        // confirm this pairing is intentional
        if (options.include && options.include.length > 0) {
          criteria.attributes = options.fieldsRB
        } else if (options.excludeRB && options.excludeRB.length > 0) {
          criteria.attributes = { exclude: options.excludeRB }
        }
        // include WorkPeriod model
        if (options.withWorkPeriods) {
          criteria.include = [{
            model: ResourceBooking._models.WorkPeriod,
            as: 'workPeriods',
            required: false
          }]
          // Select WorkPeriod fields
          if (!options.allWorkPeriods) {
            if (options.fieldsWP && options.fieldsWP.length > 0) {
              // entries come prefixed (e.g. 'workPeriods.startDate'); keep only the attribute name
              criteria.include[0].attributes = _.map(options.fieldsWP, f => _.split(f, '.')[1])
            } else {
              // we should include at least one workPeriod field
              // if fields criteria has no workPeriod field but have workPeriodPayment field
              criteria.include[0].attributes = ['id']
            }
          } else if (options.excludeWP && options.excludeWP.length > 0) {
            criteria.include[0].attributes = { exclude: _.map(options.excludeWP, f => _.split(f, '.')[1]) }
          }
          // Include WorkPeriodPayment Model
          if (options.withWorkPeriodPayments) {
            criteria.include[0].include = [{
              model: ResourceBooking._models.WorkPeriodPayment,
              as: 'payments',
              required: false
            }]
            // Select WorkPeriodPayment fields
            if (!options.allWorkPeriodPayments) {
              criteria.include[0].include[0].attributes = _.map(options.fieldsWPP, f => _.split(f, '.')[2])
            } else if (options.excludeWPP && options.excludeWPP.length > 0) {
              criteria.include[0].include[0].attributes = { exclude: _.map(options.excludeWPP, f => _.split(f, '.')[2]) }
            }
          }
        }
      }
      const resourceBooking = await ResourceBooking.findOne(criteria)
      if (!resourceBooking) {
        throw new errors.NotFoundError(`id: ${id} "ResourceBooking" doesn't exists.`)
      }
      return resourceBooking
    }
  }
  ResourceBooking.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      projectId: {
        field: 'project_id',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      userId: {
        field: 'user_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      jobId: {
        field: 'job_id',
        type: Sequelize.UUID
      },
      status: {
        type: Sequelize.STRING(255),
        allowNull: false
      },
      startDate: {
        field: 'start_date',
        type: Sequelize.DATEONLY
      },
      endDate: {
        field: 'end_date',
        type: Sequelize.DATEONLY
      },
      memberRate: {
        field: 'member_rate',
        type: Sequelize.FLOAT
      },
      customerRate: {
        field: 'customer_rate',
        type: Sequelize.FLOAT
      },
      rateType: {
        field: 'rate_type',
        type: Sequelize.STRING(255),
        allowNull: false
      },
      sendWeeklySurvey: {
        field: 'send_weekly_survey',
        type: Sequelize.BOOLEAN,
        defaultValue: true,
        allowNull: false
      },
      billingAccountId: {
        field: 'billing_account_id',
        type: Sequelize.BIGINT
      },
      // audit columns
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      // soft-delete timestamp, managed by Sequelize's paranoid mode (see init options below)
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'resource_bookings',
      // paranoid: rows are soft-deleted by setting deleted_at instead of being removed
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      // hide the soft-delete marker from query results by default
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // strip deletedAt from the instance returned right after creation as well
        afterCreate: (resourceBooking) => {
          delete resourceBooking.dataValues.deletedAt
        }
      }
    }
  )
  return ResourceBooking
}
<file_sep>/*
 * Handle events for Role.
 */
const { Op } = require('sequelize')
const _ = require('lodash')
const models = require('../models')
const logger = require('../common/logger')
const helper = require('../common/helper')
const JobService = require('../services/JobService')
const Job = models.Job
/**
 * When a Role is deleted, strip its id from the roleIds of any jobs
 * that still reference it.
 * @param {object} payload the event payload
 * @returns {undefined}
 */
async function updateJobs (payload) {
  const roleId = payload.value.id
  // look up every job that still references this role
  const affectedJobs = await Job.findAll({
    where: {
      roleIds: { [Op.contains]: [roleId] }
    },
    raw: true
  })
  if (!affectedJobs.length) {
    logger.debug({
      component: 'RoleEventHandler',
      context: 'updateJobs',
      message: `id: ${roleId} role has no related job - ignored`
    })
    return
  }
  const m2mUser = helper.getAuditM2Muser()
  // drop the deleted role id from each affected job (null when none remain)
  const updates = affectedJobs.map(async (job) => {
    const remaining = _.filter(job.roleIds, (id) => id !== roleId)
    await JobService.partiallyUpdateJob(m2mUser, job.id, { roleIds: remaining.length ? remaining : null })
  })
  await Promise.all(updates)
  logger.debug({
    component: 'RoleEventHandler',
    context: 'updateJobs',
    message: `role id: ${roleId} removed from jobs with id: ${_.map(affectedJobs, 'id')}`
  })
}
/**
 * Process role delete event.
 *
 * @param {Object} payload the event payload
 * @returns {undefined}
 */
async function processDelete (payload) {
  // the only work on delete is detaching the role from jobs
  return updateJobs(payload)
}
// Event handlers exposed to the event dispatcher.
module.exports = {
  processDelete
}
<file_sep>/*
* Provide some commonly used functions for the RCRM import script.
*/
const config = require('./config')
const _ = require('lodash')
const request = require('superagent')
const commonHelper = require('../common/helper')
/*
 * Function to get M2M token
 * @returns {Promise}
 */
const getM2MToken = (() => {
  const m2mAuth = require('tc-core-library-js').auth.m2m
  // initialize the M2M client once, from the Auth0-related config keys
  const m2mConfig = _.pick(config, [
    'AUTH0_URL',
    'AUTH0_AUDIENCE',
    'AUTH0_CLIENT_ID',
    'AUTH0_CLIENT_SECRET',
    'AUTH0_PROXY_SERVER_URL'
  ])
  const m2m = m2mAuth(m2mConfig)
  return async () => m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET)
})()
/**
 * Create a new job via taas api.
 *
 * @param {Object} data the job data
 * @returns {Object} the created job
 */
async function createJob (data) {
  const m2mToken = await getM2MToken()
  const response = await request
    .post(`${config.TAAS_API_URL}/jobs`)
    .set('Content-Type', 'application/json')
    .set('Authorization', `Bearer ${m2mToken}`)
    .send(data)
  return response.body
}
/**
 * Find taas job by external id.
 *
 * @param {String} externalId the external id
 * @returns {Object} the matching job
 */
async function getJobByExternalId (externalId) {
  const m2mToken = await getM2MToken()
  // delegate the actual lookup to the shared helper
  return commonHelper.getJobByExternalId(m2mToken, config.TAAS_API_URL, externalId)
}
/**
 * Update the status of a resource booking.
 *
 * @param {String} resourceBookingId the resource booking id
 * @param {Object} data the data to update
 * @returns {Object} the updated resource booking
 */
async function updateResourceBooking (resourceBookingId, data) {
  const m2mToken = await getM2MToken()
  const response = await request
    .patch(`${config.TAAS_API_URL}/resourceBookings/${resourceBookingId}`)
    .set('Content-Type', 'application/json')
    .set('Authorization', `Bearer ${m2mToken}`)
    .send(data)
  return response.body
}
/**
 * Find taas resource booking by job id and user id.
 *
 * @param {String} jobId the job id
 * @param {String} userId the user id
 * @returns {Object} the first matching resource booking
 * @throws {Error} when no resource booking matches
 */
async function getResourceBookingByJobIdAndUserId (jobId, userId) {
  const m2mToken = await getM2MToken()
  const response = await request
    .get(`${config.TAAS_API_URL}/resourceBookings`)
    .set('Authorization', `Bearer ${m2mToken}`)
    .query({ jobId, userId })
  const matches = response.body
  if (matches.length === 0) {
    throw new Error(`jobId: ${jobId} userId: ${userId} resource booking not found`)
  }
  return matches[0]
}
/**
 * Create a new resource booking via taas api.
 *
 * @param {Object} data the resource booking data
 * @returns {Object} the created resource booking
 */
async function createResourceBooking (data) {
  const m2mToken = await getM2MToken()
  const response = await request
    .post(`${config.TAAS_API_URL}/resourceBookings`)
    .set('Content-Type', 'application/json')
    .set('Authorization', `Bearer ${m2mToken}`)
    .send(data)
  return response.body
}
/**
 * Find user via /v5/users by user handle.
 *
 * @param {String} handle the user handle
 * @returns {Object} the first matching user
 * @throws {Error} when no user matches the handle
 */
async function getUserByHandle (handle) {
  const m2mToken = await getM2MToken()
  const response = await request
    .get(`${config.TC_API}/users`)
    .set('Authorization', `Bearer ${m2mToken}`)
    .query({ handle })
  const users = response.body
  if (users.length === 0) {
    throw new Error(`handle: ${handle} user not found`)
  }
  return users[0]
}
/**
 * Find project via /v5/projects by Direct project id.
 *
 * @param {Number} directProjectId the Direct project id
 * @returns {Object} the first matching project
 * @throws {Error} when no project matches
 */
async function getProjectByDirectProjectId (directProjectId) {
  const m2mToken = await getM2MToken()
  const response = await request
    .get(`${config.TC_API}/projects`)
    .set('Authorization', `Bearer ${m2mToken}`)
    .query({ directProjectId })
  const projects = response.body
  if (projects.length === 0) {
    throw new Error(`directProjectId: ${directProjectId} project not found`)
  }
  return projects[0]
}
// Public interface of this helper module. Generic utilities (sleep, CSV
// loading, command-line parsing) are re-exported from the common helper.
module.exports = {
  sleep: commonHelper.sleep,
  loadCSVFromFile: commonHelper.loadCSVFromFile,
  getPathnameFromCommandline: commonHelper.getPathnameFromCommandline,
  createJob,
  getJobByExternalId,
  updateResourceBooking,
  getResourceBookingByJobIdAndUserId,
  createResourceBooking,
  getUserByHandle,
  getProjectByDirectProjectId
}
<file_sep># Trigger and render demo Email Notifications.
This script does 2 things:
- update demo data created by `npm run local:init` inside the DB in such a way that it would create situation for Email Notifications which would be triggered by the scheduler to demonstrate all possible cases.
- start Kafka Consumer that would listen to the Kafka Topic `config.NOTIFICATIONS_CREATE_TOPIC` and if there is email notification created, it would render it using provided email template `data/notifications-email-template.html` into `out` folder.
## Usage
1. Config scheduler to run more often so we don't have to wait to long for triggering notification, like every minute:
```sh
CRON_CANDIDATE_REVIEW=0 */1 * * * *
CRON_INTERVIEW_COMING_UP=0 */1 * * * *
CRON_INTERVIEW_COMPLETED=0 */1 * * * *
CRON_POST_INTERVIEW=0 */1 * * * *
CRON_UPCOMING_RESOURCE_BOOKING=0 */1 * * * *
INTERVIEW_COMING_UP_MATCH_WINDOW=PT1M
INTERVIEW_COMPLETED_MATCH_WINDOW=PT1M
```
2. Config `SLACK_WEBHOOK_URL` env, if you want to send slack notifications
```sh
SLACK_WEBHOOK_URL=https://hooks.slack.com/services/***
```
3. Recreate demo data by:
```sh
npm run local:init
```
4. Run TaaS API by:
```sh
npm run dev
```
5. Run this demo script:
```sh
node scripts/demo-email-notifications
```
Check the rendered emails inside `out` folder.
<file_sep>/**
* Controller for Role endpoints
*/
const HttpStatus = require('http-status-codes')
const service = require('../services/RoleService')
/**
 * Get role by id
 * @param req the request
 * @param res the response
 */
async function getRole (req, res) {
  const role = await service.getRole(req.params.id, req.query.fromDb)
  res.send(role)
}
/**
 * Create role
 * @param req the request
 * @param res the response
 */
async function createRole (req, res) {
  const created = await service.createRole(req.authUser, req.body)
  res.send(created)
}
/**
 * Update role by id
 * @param req the request
 * @param res the response
 */
async function updateRole (req, res) {
  const updated = await service.updateRole(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
 * Delete role by id
 * @param req the request
 * @param res the response
 */
async function deleteRole (req, res) {
  const { id } = req.params
  await service.deleteRole(req.authUser, id)
  // deletion has no body to return — respond with 204
  res.status(HttpStatus.NO_CONTENT).end()
}
/**
 * Search roles
 * @param req the request
 * @param res the response
 */
async function searchRoles (req, res) {
  const results = await service.searchRoles(req.query)
  res.send(results)
}
// Role CRUD + search handlers consumed by the route definitions.
module.exports = {
  getRole,
  createRole,
  updateRole,
  deleteRole,
  searchRoles
}
<file_sep>/*
* Script to sync values of Jobs from Recruit CRM to Taas API.
*/
const Joi = require('joi')
const Report = require('./report')
const config = require('./config')
const helper = require('./helper')
const constants = require('./constants')
const logger = require('./logger')
// CSV rows must carry an allowApply flag; externalId may be empty, and any
// extra columns are passed through unvalidated (`unknown(true)`).
const jobSchema = Joi.object({
  allowApply: Joi.string().valid('Yes', 'No').required(),
  externalId: Joi.string().allow('')
}).unknown(true)
/**
 * Process single job data. The processing consists of:
 * - Validate the data.
 * - Skip processing if externalId is missing.
 * - Search job by externalId and update its `isApplicationPageActive` property
     (skip processing if `isApplicationPageActive` is already set).
 *
 * @param {Object} job the job data
 * @param {Array} info contains processing details
 * @returns {Object} the processing result: { status, info }
 */
async function processJob (job, info = []) {
  // validate the data
  const { value: data, error } = jobSchema.validate(job)
  if (error) {
    info.push({ text: error.details[0].message, tag: 'validation_error' })
    return { status: constants.ProcessingStatus.Failed, info }
  }
  // derive the target flag once, only after validation succeeded
  data.isApplicationPageActive = data.allowApply === 'Yes'
  // skip processing if externalId is missing
  if (!data.externalId) {
    info.push({ text: 'externalId is missing', tag: 'external_id_missing' })
    return { status: constants.ProcessingStatus.Skipped, info }
  }
  try {
    // search job by externalId and update its `isApplicationPageActive` property
    const existingJob = await helper.getJobByExternalId(data.externalId)
    logger.debug(`jobId: ${existingJob.id} isApplicationPageActive(current): ${existingJob.isApplicationPageActive} - isApplicationPageActive(to be synced): ${data.isApplicationPageActive}`)
    // skip processing if `isApplicationPageActive` is already set
    if (existingJob.isApplicationPageActive === data.isApplicationPageActive) {
      info.push({ text: 'isApplicationPageActive is already set', tag: 'is_application_page_active_already_set' })
      return { status: constants.ProcessingStatus.Skipped, info }
    }
    // reuse the flag computed above instead of re-deriving it from allowApply
    const updatedJob = await helper.updateJob(existingJob.id, { isApplicationPageActive: data.isApplicationPageActive })
    info.push({ text: `id: ${existingJob.id} isApplicationPageActive: ${updatedJob.isApplicationPageActive} "job" updated`, tag: 'job_is_application_page_active_updated', currentValue: updatedJob.isApplicationPageActive })
    return { status: constants.ProcessingStatus.Successful, info }
  } catch (err) {
    // only "job not found" errors are handled here; anything else is re-thrown
    if (!(err.message && err.message.includes('job not found'))) {
      throw err
    }
    info.push({ text: `[EXTERNAL_ID_NOT_FOUND] ${err.message}`, tag: 'external_id_not_found' })
    return { status: constants.ProcessingStatus.Failed, info }
  }
}
/**
 * The entry of the script.
 *
 * @returns {undefined}
 */
async function main () {
  const pathname = helper.getPathnameFromCommandline()
  const rows = await helper.loadCSVFromFile(pathname, constants.fieldNameMap)
  const report = new Report()
  for (const row of rows) {
    logger.debug(`processing line #${row._lnum} - ${JSON.stringify(row)}`)
    try {
      const result = await processJob(row)
      report.add({ lnum: row._lnum, ...result })
    } catch (err) {
      // request errors carry a response; everything else is an internal error
      const entry = err.response
        ? { text: err.response.error.toString().split('\n')[0], tag: 'request_error' }
        : { text: err.message, tag: 'internal_error' }
      report.add({ lnum: row._lnum, status: constants.ProcessingStatus.Failed, info: [entry] })
    }
    report.print()
    logger.debug(`processed line #${row._lnum}`)
    // throttle requests between rows
    await helper.sleep(config.SLEEP_TIME)
  }
  report.printSummary()
}
// run the script: exit 0 on success, log the error and exit 1 on failure
main().then(() => {
  logger.info('done!')
  process.exit()
}).catch(err => {
  logger.error(err.message)
  process.exit(1)
})
<file_sep>/**
* Controller for Job endpoints
*/
const HttpStatus = require('http-status-codes')
const _ = require('lodash')
const service = require('../services/JobService')
const helper = require('../common/helper')
/**
 * Get a job by its id.
 * @param req the request (expects `params.id`, optional `query.fromDb`)
 * @param res the response
 */
async function getJob (req, res) {
  const job = await service.getJob(req.authUser, req.params.id, req.query.fromDb)
  res.send(job)
}
/**
 * Create a new job from the request body.
 * @param req the request
 * @param res the response
 */
async function createJob (req, res) {
  const created = await service.createJob(req.authUser, req.body)
  res.send(created)
}
/**
 * Partially update a job by id with the fields in the request body.
 * @param req the request
 * @param res the response
 */
async function partiallyUpdateJob (req, res) {
  const updated = await service.partiallyUpdateJob(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
 * Fully replace a job by id with the request body.
 * @param req the request
 * @param res the response
 */
async function fullyUpdateJob (req, res) {
  const updated = await service.fullyUpdateJob(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
 * Delete job by id.
 * Responds with 204 No Content on success.
 * @param req the request (expects `params.id`)
 * @param res the response
 */
async function deleteJob (req, res) {
  await service.deleteJob(req.authUser, req.params.id)
  res.status(HttpStatus.NO_CONTENT).end()
}
/**
 * Search jobs.
 * Merges query-string criteria with an optional `jobIds` list from the body,
 * sets pagination headers, and sends the result list.
 * @param req the request
 * @param res the response
 */
async function searchJobs (req, res) {
  const jobIds = _.get(req, 'body.jobIds', [])
  const criteria = { ...req.query, jobIds }
  const result = await service.searchJobs(req.authUser, criteria)
  helper.setResHeaders(req, res, result)
  res.send(result.result)
}
// expose the Job controller handlers
module.exports = {
  getJob,
  createJob,
  partiallyUpdateJob,
  fullyUpdateJob,
  deleteJob,
  searchJobs
}
<file_sep>const Kafka = require('no-kafka')
const fs = require('fs')
const config = require('config')
const axios = require('axios')
const _ = require('lodash')
const moment = require('moment')
const handlebars = require('handlebars')
const logger = require('../../src/common/logger')
const { Interview, JobCandidate, ResourceBooking } = require('../../src/models')
const { Interviews } = require('../../app-constants')
// Kafka group consumer used to receive notification messages for this test
const consumer = new Kafka.GroupConsumer({ connectionString: process.env.KAFKA_URL, groupId: 'test-render-email' })
// logger wrapper that tags every message with this script's component/context
const localLogger = {
  debug: message => logger.debug({ component: 'render email content', context: 'test', message }),
  info: message => logger.info({ component: 'render email content', context: 'test', message })
}
// compiled handlebars template used to render notification payloads into email HTML
const template = handlebars.compile(fs.readFileSync('./data/notifications-email-template.html', 'utf8'))
/**
 * Reset notification records.
 *
 * Rewrites a set of fixture rows (looked up by hard-coded ids) so that the
 * notification scheduler will pick them up again: a coming-up interview, two
 * completed interviews, a post-interview candidate-action reminder, and a
 * resource booking about to expire. Timestamps are computed relative to "now"
 * using the configured remind/match windows.
 */
async function resetNotificationRecords () {
  // reset coming up interview records
  localLogger.info('reset coming up interview records')
  const interview = await Interview.findById('976d23a9-5710-453f-99d9-f57a588bb610')
  // start time placed inside the "coming up" reminder window
  const startTimestamp = moment().add(moment.duration(config.INTERVIEW_COMING_UP_REMIND_TIME[0])).add(config.INTERVIEW_COMING_UP_MATCH_WINDOW).toDate()
  await interview.update({ startTimestamp, duration: 30, status: Interviews.Status.Scheduled, guestNames: ['test1', 'test2'], hostName: 'hostName' })
  // reset completed interview records
  localLogger.info('reset completed interview records')
  const pastTime = moment.duration(config.INTERVIEW_COMPLETED_PAST_TIME)
  // start time placed just inside the "completed" match window in the past
  const completedStartTimestamp = moment().subtract(pastTime).add(config.INTERVIEW_COMPLETED_MATCH_WINDOW).toDate()
  const completedInterview = await Interview.findById('9efd72c3-1dc7-4ce2-9869-8cca81d0adeb')
  const duration = 30
  const completedEndTimestamp = moment(completedStartTimestamp).clone().add(30, 'm').toDate()
  await completedInterview.update({ startTimestamp: completedStartTimestamp, duration, endTimeStamp: completedEndTimestamp, status: Interviews.Status.Scheduled, guestNames: ['guest1', 'guest2'], hostName: 'hostName' })
  const completedInterview2 = await Interview.findById('3144fa65-ea1a-4bec-81b0-7cb1c8845826')
  await completedInterview2.update({ startTimestamp: completedStartTimestamp, duration, endTimeStamp: completedEndTimestamp, status: Interviews.Status.Scheduled, guestNames: ['guest1', 'guest2'], hostName: 'hostName' })
  // reset post interview candidate action reminder records
  localLogger.info('reset post interview candidate action reminder records')
  const jobCandidate = await JobCandidate.findById('827ee401-df04-42e1-abbe-7b97ce7937ff')
  await jobCandidate.update({ status: 'interview' })
  const c2Interview = await Interview.findById('3144fa65-ea1a-4bec-81b0-7cb1c8845826')
  await c2Interview.update({ startTimestamp: moment().subtract(moment.duration(config.POST_INTERVIEW_ACTION_MATCH_WINDOW)).subtract(30, 'm').toDate(), duration, endTimeStamp: completedEndTimestamp, guestNames: ['guest1', 'guest2'], hostName: 'hostName' })
  const c2InterviewR2 = await Interview.findById('b1f7ba76-640f-47e2-9463-59e51b51ec60')
  await c2InterviewR2.update({ status: 'Scheduled', startTimestamp: moment().subtract(moment.duration(config.POST_INTERVIEW_ACTION_MATCH_WINDOW)).subtract(30, 'm').toDate(), duration, endTimeStamp: completedEndTimestamp, guestNames: ['guest1', 'guest2'], hostName: 'hostName' })
  // reset upcoming resource booking expiration records
  localLogger.info('reset upcoming resource booking expiration records')
  const resourceBooking = await ResourceBooking.findById('62c3f0c9-2bf0-4f24-8647-2c802a39cbcb')
  // end date placed inside the expiry-notification window
  const testEnd = moment().add(moment.duration(config.RESOURCE_BOOKING_EXPIRY_TIME)).toDate()
  await resourceBooking.update({ endDate: testEnd })
}
/**
 * Init consumer.
 *
 * Subscribes to the notifications topic; for each message renders every
 * `email` notification to an HTML file under ./out and forwards every
 * `slack` notification to SLACK_WEBHOOK_URL (if set).
 */
async function initConsumer () {
  await consumer
    .init([{
      subscriptions: [config.NOTIFICATIONS_CREATE_TOPIC],
      handler: async (messageSet, topic, partition) => {
        localLogger.debug(`Consumer handler. Topic: ${topic}, partition: ${partition}, message set length: ${messageSet.length}`)
        for (const m of messageSet) {
          const message = JSON.parse(m.message.value.toString('utf8'))
          // ensure the output directory exists before writing rendered emails
          if (!fs.existsSync('out')) {
            fs.mkdirSync('out')
          }
          if (message.payload.notifications) {
            // render each email notification through the handlebars template
            _.forEach(_.filter(message.payload.notifications, ['serviceId', 'email']), (notification) => {
              const email = template(notification.details.data)
              fs.writeFileSync(`./out/${notification.details.data.subject}-${Date.now()}.html`, email)
            })
            // forward slack notifications to the webhook, if configured
            for (const notification of _.filter(message.payload.notifications, ['serviceId', 'slack'])) {
              if (process.env.SLACK_WEBHOOK_URL) {
                await axios.post(process.env.SLACK_WEBHOOK_URL, { text: notification.details.text, blocks: notification.details.blocks })
              }
            }
          }
        }
      }
    }])
    .then(() => {
      localLogger.info('Initialized.......')
      localLogger.info([config.NOTIFICATIONS_CREATE_TOPIC])
      localLogger.info('Kick Start.......')
    }).catch(err => {
      logger.logFullError(err, { component: 'app' })
    })
}
/**
 * Main function: reset the fixture records, then start consuming
 * notification messages (runs until the process is killed).
 */
async function main () {
  await resetNotificationRecords()
  await initConsumer()
}
main()
<file_sep>const config = require('config')
const ResourceBooking = require('../src/models').ResourceBooking
const _ = require('lodash')
const helper = require('../src/common/helper')
const { v4: uuid } = require('uuid')
const { AggregatePaymentStatus } = require('../app-constants')
// maximum start date of resource bookings when populating work periods from existing resource bookings in migration script
const MAX_START_DATE = process.env.MAX_START_DATE || '2100-12-31'
// maximum end date of resource bookings when populating work periods from existing resource bookings in migration script
const MAX_END_DATE = process.env.MAX_END_DATE || '2100-12-31'
/*
* Populate WorkPeriods for ResourceBookings
*/
module.exports = {
up: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
const Op = Sequelize.Op
try {
await queryInterface.bulkDelete({
tableName: 'payment_schedulers',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.bulkDelete({
tableName: 'work_period_payments',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.bulkDelete({
tableName: 'work_periods',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'member_rate', { transaction })
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'customer_rate', { transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'days_paid',
{ type: Sequelize.INTEGER, allowNull: false },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'payment_total',
{ type: Sequelize.FLOAT, allowNull: false },
{ transaction })
await queryInterface.changeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'days_worked',
{ type: Sequelize.INTEGER, allowNull: false },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'member_rate',
{ type: Sequelize.FLOAT, allowNull: false },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'customer_rate',
{ type: Sequelize.FLOAT, allowNull: true },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'days',
{ type: Sequelize.INTEGER, allowNull: false },
{ transaction })
await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'amount',
{ type: Sequelize.DOUBLE, allowNull: false },
{ transaction })
await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'billing_account_id',
{ type: Sequelize.BIGINT, allowNull: false },
{ transaction })
const resourceBookings = await ResourceBooking.findAll({
where: {
start_date: { [Op.lt]: new Date(MAX_START_DATE) },
end_date: { [Op.lt]: new Date(MAX_END_DATE) }
}
})
if (resourceBookings.length === 0) {
return
}
const workPeriodData = []
for (const rb of resourceBookings) {
if (!_.isNil(rb.startDate) && !_.isNil(rb.endDate)) {
const periods = helper.extractWorkPeriods(rb.startDate, rb.endDate)
const user = await helper.ensureUserById(rb.userId)
_.forEach(periods, period => {
workPeriodData.push({
id: uuid(),
resource_booking_id: rb.id,
project_id: rb.projectId,
user_handle: user.handle,
start_date: period.startDate,
end_date: period.endDate,
days_worked: period.daysWorked,
days_paid: 0,
payment_total: 0,
payment_status: period.daysWorked === 0 ? AggregatePaymentStatus.NO_DAYS : AggregatePaymentStatus.PENDING,
created_by: config.m2m.M2M_AUDIT_USER_ID,
created_at: new Date()
})
})
}
}
await queryInterface.bulkInsert({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, workPeriodData, { transaction })
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
},
down: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
try {
await queryInterface.bulkDelete({
tableName: 'payment_schedulers',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.bulkDelete({
tableName: 'work_period_payments',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.bulkDelete({
tableName: 'work_periods',
schema: config.DB_SCHEMA_NAME,
transaction
})
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'days_paid', { transaction })
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'payment_total', { transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'member_rate',
{ type: Sequelize.FLOAT, allowNull: true },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'customer_rate',
{ type: Sequelize.FLOAT, allowNull: true },
{ transaction })
await queryInterface.changeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'days_worked',
{ type: Sequelize.INTEGER, allowNull: true },
{ transaction })
await queryInterface.removeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'member_rate', { transaction })
await queryInterface.removeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'customer_rate', { transaction })
await queryInterface.removeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'days', { transaction })
await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'amount',
{ type: Sequelize.DOUBLE, allowNull: true },
{ transaction })
await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'billing_account_id',
{ type: Sequelize.BIGINT, allowNull: true },
{ transaction })
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
}
}
<file_sep>### DEMO PAYMENT SCRIPT
This demo script tests the functionality of PaymentService.
Parameters for creating payments are hardcoded in the script. There are several groups of parameters, and each group tests a certain piece of functionality of the demo service. You can always insert a new group of parameters to run in the script.
Before starting, set the following environment variables:
AUTH0_URL=
AUTH0_AUDIENCE=
AUTH0_AUDIENCE_UBAHN=
AUTH0_CLIENT_ID=
AUTH0_CLIENT_SECRET=
To run the script use the following commands:
```
npm install
npm run lint
npm run demo-payment
```
Read the logger to see results.<file_sep>/**
* Controller for WorkPeriod endpoints
*/
const service = require('../services/WorkPeriodService')
const helper = require('../common/helper')
/**
 * Get a workPeriod by its id.
 * @param req the request (expects `params.id`, optional `query.fromDb`)
 * @param res the response
 */
async function getWorkPeriod (req, res) {
  const workPeriod = await service.getWorkPeriod(req.authUser, req.params.id, req.query.fromDb)
  res.send(workPeriod)
}
/**
 * Partially update a workPeriod by id with the fields in the request body.
 * @param req the request
 * @param res the response
 */
async function partiallyUpdateWorkPeriod (req, res) {
  const updated = await service.partiallyUpdateWorkPeriod(req.authUser, req.params.id, req.body)
  res.send(updated)
}
/**
 * Search workPeriods by the query-string criteria.
 * Sets pagination headers, then sends the result list.
 * @param req the request
 * @param res the response
 */
async function searchWorkPeriods (req, res) {
  const searchResult = await service.searchWorkPeriods(req.authUser, req.query)
  helper.setResHeaders(req, res, searchResult)
  res.send(searchResult.result)
}
// expose the WorkPeriod controller handlers
module.exports = {
  getWorkPeriod,
  partiallyUpdateWorkPeriod,
  searchWorkPeriods
}
<file_sep>const { Sequelize, Model } = require('sequelize')
const config = require('config')
const errors = require('../common/errors')
/**
 * Sequelize model for the `role_search_requests` table.
 * Rows are soft-deleted (paranoid mode) via `deleted_at`.
 */
module.exports = (sequelize) => {
  class RoleSearchRequest extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      RoleSearchRequest._models = models
      // each request may reference the Role it was matched to
      RoleSearchRequest.belongsTo(models.Role, {
        foreignKey: 'roleId'
      })
    }

    /**
     * Get RoleSearchRequest by id
     * @param {String} id the RoleSearchRequest id
     * @returns {RoleSearchRequest} the RoleSearchRequest instance
     * @throws {NotFoundError} when no record matches the id
     */
    static async findById (id) {
      const roleSearchRequest = await RoleSearchRequest.findOne({
        where: {
          id
        }
      })
      if (!roleSearchRequest) {
        throw new errors.NotFoundError(`id: ${id} "RoleSearchRequest" doesn't exists.`)
      }
      return roleSearchRequest
    }
  }
  RoleSearchRequest.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      memberId: {
        field: 'member_id',
        type: Sequelize.UUID
      },
      // links a follow-up request to the request it refines
      previousRoleSearchRequestId: {
        field: 'previous_role_search_request_id',
        type: Sequelize.UUID
      },
      roleId: {
        field: 'role_id',
        type: Sequelize.UUID,
        allowNull: true
      },
      jobDescription: {
        field: 'job_description',
        type: Sequelize.STRING(100000)
      },
      // list of skill ids
      skills: {
        type: Sequelize.ARRAY({
          type: Sequelize.UUID
        })
      },
      jobTitle: {
        field: 'job_title',
        type: Sequelize.STRING(100),
        allowNull: true
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'role_search_requests',
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      // hide the soft-delete column from default queries
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // strip deletedAt from freshly created instances as well
        afterCreate: (role) => {
          delete role.dataValues.deletedAt
        }
      }
    }
  )
  return RoleSearchRequest
}
<file_sep>const config = require('config')
/**
 * Add jobTitle field to the RoleSearchRequest model.
 * up: adds the nullable `job_title` column; down: removes it.
 */
module.exports = {
  up: async (queryInterface, Sequelize) => {
    await queryInterface.addColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME }, 'job_title',
      {
        type: Sequelize.STRING(100),
        allowNull: true
      })
  },
  down: async (queryInterface, Sequelize) => {
    await queryInterface.removeColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME}, 'job_title')
  }
}<file_sep>const config = require('config')
/*
 * Create work_periods table and reference to the "resource_bookings" table.
 * A work period is one calendar week of a resource booking; the table keeps
 * a unique (resource_booking_id, start_date, end_date) constraint among
 * non-deleted rows.
 */
module.exports = {
  up: async (queryInterface, Sequelize) => {
    // because our migration have more than one step we use transaction
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.createTable('work_periods', {
        id: {
          type: Sequelize.UUID,
          primaryKey: true,
          allowNull: false,
          defaultValue: Sequelize.UUIDV4
        },
        resourceBookingId: {
          field: 'resource_booking_id',
          type: Sequelize.UUID,
          allowNull: false,
          // cascade so work periods follow their resource booking's lifecycle
          references: {
            model: {
              tableName: 'resource_bookings',
              schema: config.DB_SCHEMA_NAME
            },
            key: 'id'
          },
          onUpdate: 'CASCADE',
          onDelete: 'CASCADE'
        },
        userHandle: {
          field: 'user_handle',
          type: Sequelize.STRING(50),
          allowNull: false
        },
        projectId: {
          field: 'project_id',
          type: Sequelize.INTEGER,
          allowNull: false
        },
        startDate: {
          field: 'start_date',
          type: Sequelize.DATEONLY,
          allowNull: false
        },
        endDate: {
          field: 'end_date',
          type: Sequelize.DATEONLY,
          allowNull: false
        },
        daysWorked: {
          field: 'days_worked',
          type: Sequelize.INTEGER
        },
        memberRate: {
          field: 'member_rate',
          type: Sequelize.FLOAT
        },
        customerRate: {
          field: 'customer_rate',
          type: Sequelize.FLOAT
        },
        paymentStatus: {
          field: 'payment_status',
          type: Sequelize.STRING(50),
          allowNull: false
        },
        createdBy: {
          field: 'created_by',
          type: Sequelize.UUID,
          allowNull: false
        },
        updatedBy: {
          field: 'updated_by',
          type: Sequelize.UUID
        },
        createdAt: {
          field: 'created_at',
          type: Sequelize.DATE
        },
        updatedAt: {
          field: 'updated_at',
          type: Sequelize.DATE
        },
        deletedAt: {
          field: 'deleted_at',
          type: Sequelize.DATE
        }
      }, {
        schema: config.DB_SCHEMA_NAME,
        transaction
      })
      // uniqueness applies only to rows that are not soft-deleted
      await queryInterface.addIndex(
        {
          tableName: 'work_periods',
          schema: config.DB_SCHEMA_NAME
        },
        ['resource_booking_id', 'start_date', 'end_date'],
        {
          type: 'UNIQUE',
          where: { deleted_at: null },
          transaction: transaction
        }
      )
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  },
  down: async (queryInterface, Sequelize) => {
    await queryInterface.dropTable({
      tableName: 'work_periods',
      schema: config.DB_SCHEMA_NAME
    })
  }
}
<file_sep>/**
* This service provides operations of WorkPeriod.
*/
const _ = require('lodash')
const Joi = require('joi').extend(require('@joi/date'))
const config = require('config')
const HttpStatus = require('http-status-codes')
const { Op } = require('sequelize')
const uuid = require('uuid')
const moment = require('moment')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const { WorkPeriodPaymentStatus, ActiveWorkPeriodPaymentStatuses } = require('../../app-constants')
const { searchResourceBookings } = require('./ResourceBookingService')
const {
processCreate,
processUpdate
} = require('../esProcessors/WorkPeriodPaymentProcessor')
const sequelize = models.sequelize
const WorkPeriodPayment = models.WorkPeriodPayment
const WorkPeriod = models.WorkPeriod
const esClient = helper.getESClient()
/**
 * Check user permission for creating, updating or getting
 * work period payment.
 * Only Booking Manager, Admin, and M2M have access to create, view or
 * update payments.
 * @param {Object} currentUser the user who performs this operation
 * @throws {ForbiddenError} when the user lacks the required permission
 * @returns {undefined}
 */
function _checkUserPermissionForCRUWorkPeriodPayment (currentUser) {
  const isAllowed = currentUser.hasManagePermission || currentUser.isMachine
  if (!isAllowed) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
}
/**
 * Create a single workPeriodPayment.
 * Resolves the parent work period and resource booking first, then
 * delegates to the lower-level creation helper.
 * @param {Object} workPeriodPayment the workPeriodPayment to be created
 * @param {String} createdBy the authUser id
 * @returns {Object} the created workPeriodPayment
 */
async function _createSingleWorkPeriodPayment (workPeriodPayment, createdBy) {
  // ensure the referenced work period exists
  const workPeriod = await helper.ensureWorkPeriodById(workPeriodPayment.workPeriodId)
  // the billing account is taken from the corresponding resource booking
  const resourceBooking = await helper.ensureResourceBookingById(workPeriod.resourceBookingId)
  return _createSingleWorkPeriodPaymentWithWorkPeriodAndResourceBooking(workPeriodPayment, createdBy, workPeriod.toJSON(), resourceBooking.toJSON())
}
/**
 * Update the billing information of a challenge.
 * No-op when `data` carries no billingAccountId.
 * @param {String} challengeId the challenge id
 * @param {Object} data the challenge update data (only `billingAccountId` is used)
 * @throws {BadRequestError} when the challenge update fails
 * @returns {undefined}
 */
async function _updateChallenge (challengeId, data) {
  // the original code tested `data.billingAccountId` twice; a single
  // early return is equivalent and clearer
  if (!data.billingAccountId) {
    return
  }
  const body = {
    billing: {
      billingAccountId: _.toString(data.billingAccountId),
      markup: 0 // for TaaS payments we always use 0 markup
    }
  }
  try {
    await helper.updateChallenge(challengeId, body)
    logger.debug({ component: 'WorkPeriodPaymentService', context: 'updateChallenge', message: `Challenge with id ${challengeId} is updated` })
  } catch (err) {
    logger.error({ component: 'WorkPeriodPaymentService', context: 'updateChallenge', message: _.get(err, 'response.text', err.toString()) })
    // fixed doubled "the the" in the original error message
    throw new errors.BadRequestError(`Cannot update the challenge: ${_.get(err, 'response.text', err.toString())}`)
  }
}
/**
 * Create single workPeriodPayment
 * @param {Object} workPeriodPayment the workPeriodPayment to be created
 * @param {String} createdBy the authUser id
 * @param {Object} correspondingWorkPeriod the workPeriod
 * @param {Object} correspondingResourceBooking the resourceBooking
 * @returns {Object} the created workPeriodPayment
 * @throws {ConflictError|BadRequestError} on billing/rate/day-count violations
 */
async function _createSingleWorkPeriodPaymentWithWorkPeriodAndResourceBooking (workPeriodPayment, createdBy, correspondingWorkPeriod, correspondingResourceBooking) {
  // a billing account on the resource booking is mandatory for any payment
  if (_.isNil(correspondingResourceBooking.billingAccountId)) {
    throw new errors.ConflictError(`id: ${correspondingResourceBooking.id} "ResourceBooking" Billing account is not assigned to the resource booking`)
  }
  workPeriodPayment.billingAccountId = correspondingResourceBooking.billingAccountId

  // TODO: we should allow `memberRate` to be `null` as it's not required for additional payments
  workPeriodPayment.memberRate = _.defaultTo(correspondingResourceBooking.memberRate, 0)
  workPeriodPayment.customerRate = _.defaultTo(correspondingResourceBooking.customerRate, null)

  // a regular (non-zero-day) payment: amount is derived from memberRate and days;
  // `days === 0` means an additional payment where `amount` is provided by the caller
  if (!_.has(workPeriodPayment, 'days') || workPeriodPayment.days > 0) {
    if (_.isNil(correspondingResourceBooking.memberRate)) {
      throw new errors.ConflictError(`Can't find a member rate in ResourceBooking: ${correspondingResourceBooking.id} to calculate the amount`)
    }
    if (correspondingResourceBooking.memberRate <= 0) {
      throw new errors.ConflictError(`Can't process payment with member rate: ${correspondingResourceBooking.memberRate}. It must be higher than 0`)
    }
    // days that were worked but not yet paid
    const maxPossibleDays = correspondingWorkPeriod.daysWorked - correspondingWorkPeriod.daysPaid
    if (workPeriodPayment.days > maxPossibleDays) {
      throw new errors.BadRequestError(`Days cannot be more than not paid days which is ${maxPossibleDays}`)
    }
    if (maxPossibleDays <= 0) {
      throw new errors.ConflictError(`There are no days to pay for WorkPeriod: ${correspondingWorkPeriod.id}`)
    }
    // the +12 offset anchors the week start — presumably the earliest timezone; confirm intent
    const workPeriodStartTime = moment(`${correspondingWorkPeriod.startDate}T00:00:00.000+12`)
    if (workPeriodStartTime.isAfter(moment())) {
      throw new errors.BadRequestError(`Cannot process payments for the future WorkPeriods. You can process after ${workPeriodStartTime.diff(moment(), 'hours')} hours`)
    }
    // when `days` is omitted, pay all remaining unpaid days
    workPeriodPayment.days = _.defaultTo(workPeriodPayment.days, maxPossibleDays)
    // amount = rate * days / 5 (weekly rate prorated over a 5-day work week)
    workPeriodPayment.amount = _.round(workPeriodPayment.memberRate * workPeriodPayment.days / 5, 2)
  }
  workPeriodPayment.id = uuid.v4()
  workPeriodPayment.status = WorkPeriodPaymentStatus.SCHEDULED
  workPeriodPayment.createdBy = createdBy

  // Kafka partition key so events for one billing account stay ordered
  const key = `workPeriodPayment.billingAccountId:${workPeriodPayment.billingAccountId}`
  let entity
  try {
    await sequelize.transaction(async (t) => {
      const created = await WorkPeriodPayment.create(workPeriodPayment, { transaction: t })
      entity = created.toJSON()
      // keep Elasticsearch in sync within the same transaction scope
      await processCreate({ ...entity, key })
    })
  } catch (err) {
    // DB row may exist while ES update failed — publish to the error topic for reconciliation
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiodpayment.create')
    }
    throw err
  }
  await helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC, entity, { key })
  return entity
}
/**
 * Get workPeriodPayment by id.
 * Tries Elasticsearch first (payments are nested under
 * resourceBooking.workPeriods.payments); on any ES failure other than
 * a permission error it falls back to the database.
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} id the workPeriodPayment id
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the workPeriodPayment
 */
async function getWorkPeriodPayment (currentUser, id, fromDb = false) {
  // check user permission
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  if (!fromDb) {
    try {
      const resourceBooking = await esClient.search({
        index: config.esConfig.ES_INDEX_RESOURCE_BOOKING,
        _source: 'workPeriods.payments',
        body: {
          query: {
            nested: {
              path: 'workPeriods.payments',
              query: {
                match: { 'workPeriods.payments.id': id }
              }
            }
          }
        }
      })
      if (!resourceBooking.body.hits.total.value) {
        throw new errors.NotFoundError()
      }
      let workPeriodPaymentRecord = null
      // scan the nested work periods for the payment with the matching id;
      // returning false from a lodash forEach callback stops the iteration early
      _.forEach(resourceBooking.body.hits.hits[0]._source.workPeriods, wp => {
        _.forEach(wp.payments, p => {
          if (p.id === id) {
            workPeriodPaymentRecord = p
            return false
          }
        })
        if (workPeriodPaymentRecord) {
          return false
        }
      })
      return workPeriodPaymentRecord
    } catch (err) {
      if (err.httpStatus === HttpStatus.NOT_FOUND) {
        throw new errors.NotFoundError(`id: ${id} "WorkPeriodPayment" not found`)
      }
      if (err.httpStatus === HttpStatus.FORBIDDEN) {
        throw err
      }
      // any other ES error: log and fall through to the DB lookup below
      logger.logFullError(err, { component: 'WorkPeriodPaymentService', context: 'getWorkPeriodPayment' })
    }
  }
  logger.info({ component: 'WorkPeriodPaymentService', context: 'getWorkPeriodPayment', message: 'try to query db for data' })
  const workPeriodPayment = await WorkPeriodPayment.findById(id)
  return workPeriodPayment
}
// Joi schema for getWorkPeriodPayment arguments
getWorkPeriodPayment.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().guid().required(),
  fromDb: Joi.boolean()
}).required()
/**
 * Create a workPeriodPayment.
 * @param {Object} currentUser the user who performs this operation
 * @param {Object} workPeriodPayment the workPeriodPayment to be created
 * @returns {Object} the created workPeriodPayment
 */
async function createWorkPeriodPayment (currentUser, workPeriodPayment) {
  // only managers / M2M may create payments
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  const creatorId = await helper.getUserId(currentUser.userId)
  const created = await _createSingleWorkPeriodPayment(workPeriodPayment, creatorId)
  return created
}
// Joi schema for one payment-creation request; when `days` is 0 (an
// additional payment) an explicit `amount` is required, otherwise it is forbidden
const singleCreateWorkPeriodPaymentSchema = Joi.object().keys({
  workPeriodId: Joi.string().uuid().required(),
  days: Joi.number().integer().min(0).max(10),
  amount: Joi.when('days', {
    is: Joi.number().integer().valid(0).exist(),
    then: Joi.number().greater(0).required().messages({
      'any.required': '"amount" has to be provided when processing additional payment for 0 days'
    }),
    otherwise: Joi.forbidden()
  })
}).required()
// Joi schema for createWorkPeriodPayment arguments
createWorkPeriodPayment.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  workPeriodPayment: singleCreateWorkPeriodPaymentSchema
})
/**
 * Create workPeriodPayments in bulk.
 * Each item is processed independently; a failure is reported inline in the
 * result list instead of aborting the batch.
 * @param {Object} currentUser the user who performs this operation
 * @param {Array<Object>} workPeriodPayments the workPeriodPayments to be created
 * @returns {Array<Object>} per-item results (created payment or error info)
 */
async function createBulkOfWorkPeriodPayments (currentUser, workPeriodPayments) {
  // only managers / M2M may create payments
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  const creatorId = await helper.getUserId(currentUser.userId)
  const results = []
  for (const payment of workPeriodPayments) {
    try {
      results.push(await _createSingleWorkPeriodPayment(payment, creatorId))
    } catch (e) {
      // report the failure for this item and continue with the rest
      results.push(_.extend(_.pick(payment, 'workPeriodId'), { error: { message: e.message, code: e.httpStatus } }))
    }
  }
  return results
}
// Joi schema for createBulkOfWorkPeriodPayments arguments
createBulkOfWorkPeriodPayments.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  workPeriodPayments: Joi.array().min(1).items(singleCreateWorkPeriodPaymentSchema).required()
}).required()
/**
 * Update workPeriodPayment.
 * Enforces the status transition rules (in-progress payments are immutable
 * except for status; only failed payments may be re-scheduled), validates
 * day counts against the work period, syncs billing info to the challenge,
 * then persists the change and emits the update event.
 * @param {String} id the workPeriod id
 * @param {Object} data the data to be updated
 * @returns {Object} the updated workPeriodPayment
 */
async function updateWorkPeriodPayment (id, data) {
  const workPeriodPayment = await WorkPeriodPayment.findById(id)
  const oldValue = workPeriodPayment.toJSON()

  // an in-progress payment's monetary fields are locked
  if (oldValue.status === 'in-progress') {
    const keys = _.keys(_.pick(data, ['amount', 'days', 'memberRate', 'customerRate', 'billingAccountId']))
    if (keys.length) {
      throw new errors.BadRequestError(`${JSON.stringify(keys)} cannot be updated when workPeriodPayment status is in-progress`)
    }
  }
  if (data.status === 'cancelled' && oldValue.status === 'in-progress') {
    throw new errors.BadRequestError('You cannot cancel a WorkPeriodPayment which is in-progress')
  }
  // only a failed payment may be re-scheduled, and only if unpaid days remain
  if (data.status === 'scheduled') {
    if (oldValue.status !== 'failed') {
      throw new errors.BadRequestError(`You cannot schedule a WorkPeriodPayment which is ${oldValue.status}`)
    }
    const workPeriod = await WorkPeriod.findById(workPeriodPayment.workPeriodId)
    // we don't check if paymentStatus is 'completed'
    // because paymentStatus can be in-progress when daysWorked = daysPaid
    if (workPeriod.daysWorked === workPeriod.daysPaid) {
      throw new errors.BadRequestError('There is no available daysWorked to schedule a payment')
    }
  }
  if (data.days) {
    const correspondingWorkPeriod = await helper.ensureWorkPeriodById(workPeriodPayment.workPeriodId) // ensure work period exists
    // if this payment is active, its own days are excluded from daysPaid when
    // computing how many days are still available
    const maxPossibleDays = correspondingWorkPeriod.daysWorked - (correspondingWorkPeriod.daysPaid -
      (_.includes(ActiveWorkPeriodPaymentStatuses, oldValue.status) ? oldValue.days : 0))
    if (data.days > maxPossibleDays) {
      throw new errors.BadRequestError(`Cannot update days paid to more than ${maxPossibleDays}, otherwise total paid days (${correspondingWorkPeriod.daysPaid -
        (_.includes(ActiveWorkPeriodPaymentStatuses, oldValue.status) ? oldValue.days : 0)}) would be more that total worked days (${correspondingWorkPeriod.daysWorked}) for the week.`)
    }
  }
  // challengeId exist and skip dummy challenge
  if (oldValue.challengeId && oldValue.challengeId !== '00000000-0000-0000-0000-000000000000') {
    await _updateChallenge(workPeriodPayment.challengeId, data)
  }

  // Kafka partition key so events for one billing account stay ordered
  const key = `workPeriodPayment.billingAccountId:${workPeriodPayment.billingAccountId}`
  let entity
  try {
    await sequelize.transaction(async (t) => {
      const updated = await workPeriodPayment.update(data, { transaction: t })
      entity = updated.toJSON()
      // keep Elasticsearch in sync within the same transaction scope
      await processUpdate({ ...entity, key })
    })
  } catch (e) {
    // DB row may be updated while ES update failed — publish to the error topic
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'workperiodpayment.update')
    }
    throw e
  }
  await helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, entity, { oldValue: oldValue, key })
  return entity
}
/**
 * Partially update a WorkPeriodPayment by its id.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the workPeriodPayment id
 * @param {Object} data the fields to be updated
 * @returns {Object} the updated workPeriodPayment
 */
async function partiallyUpdateWorkPeriodPayment (currentUser, id, data) {
  // only users allowed to create/read/update WorkPeriodPayments may proceed
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  // stamp the audit field with the acting user's internal id before delegating
  const auditUserId = await helper.getUserId(currentUser.userId)
  data.updatedBy = auditUserId
  return updateWorkPeriodPayment(id, data)
}
// Joi schema for the fields a client may change on an existing WorkPeriodPayment.
// At least one key must be supplied (`.min(1)`).
const updateWorkPeriodPaymentSchema = Joi.object().keys({
  status: Joi.workPeriodPaymentUpdateStatus(),
  amount: Joi.number().greater(0),
  // days is capped at 10 — presumably the maximum billable days in one work period; confirm
  days: Joi.number().integer().min(0).max(10),
  memberRate: Joi.number().positive(),
  customerRate: Joi.number().positive().allow(null),
  billingAccountId: Joi.number().positive().integer()
}).min(1).required()
// Argument schema for partiallyUpdateWorkPeriodPayment(currentUser, id, data).
partiallyUpdateWorkPeriodPayment.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: updateWorkPeriodPaymentSchema
}).required()
/**
 * Partially update WorkPeriodPayments in bulk.
 * Failed items do not abort the batch; they are echoed back with an `error` field.
 * @param {Object} currentUser the user who performs this operation
 * @param {Array<Object>} workPeriodPayments the workPeriodPayments data to be updated
 * @returns {Array<Object>} per-item results, in the same order as the request
 */
async function updateBulkOfWorkPeriodPayments (currentUser, workPeriodPayments) {
  // only users allowed to create/read/update WorkPeriodPayments may proceed
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  const updatedBy = await helper.getUserId(currentUser.userId)
  const outcomes = []
  // process sequentially so the response order mirrors the request payload
  for (const payment of workPeriodPayments) {
    try {
      const payload = _.assign(_.omit(payment, 'id'), { updatedBy })
      outcomes.push(await updateWorkPeriodPayment(payment.id, payload))
    } catch (err) {
      // attach the failure to the item instead of failing the whole batch
      outcomes.push(_.assign(payment, { error: { message: err.message, code: err.httpStatus } }))
    }
  }
  return outcomes
}
// Argument schema for updateBulkOfWorkPeriodPayments(currentUser, workPeriodPayments).
// Each item is the single-item update schema plus a mandatory id; `.min(2)` forces
// at least one updatable field besides the id.
updateBulkOfWorkPeriodPayments.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  workPeriodPayments: Joi.array().min(1).items(
    updateWorkPeriodPaymentSchema.keys({
      id: Joi.string().uuid().required()
    }).min(2).required()
  ).required()
}).required()
/**
 * List workPeriodPayments.
 *
 * Strategy: query Elasticsearch first (payments live as nested documents under
 * resourceBooking.workPeriods.payments); if ES fails for any reason, fall back
 * to a direct database query.
 *
 * @param {Object} currentUser the user who perform this operation.
 * @param {Object} criteria the search criteria
 * @param {Object} options the extra options to control the function
 * @returns {Object} the search result, contain total/page/perPage and result array
 */
async function searchWorkPeriodPayments (currentUser, criteria, options = { returnAll: false }) {
  // check user permission
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  // `workPeriodIds` may arrive as a comma-separated string; normalize to an
  // array of validated uuids before querying
  if ((typeof criteria.workPeriodIds) === 'string') {
    criteria.workPeriodIds = criteria.workPeriodIds.trim().split(',').map(workPeriodIdRaw => {
      const workPeriodId = workPeriodIdRaw.trim()
      if (!uuid.validate(workPeriodId)) {
        throw new errors.BadRequestError(`workPeriodId ${workPeriodId} is not a valid uuid`)
      }
      return workPeriodId
    })
  }
  const page = criteria.page
  const perPage = criteria.perPage
  try {
    const esQuery = {
      index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'),
      _source: 'workPeriods.payments',
      body: {
        query: {
          nested: {
            path: 'workPeriods.payments',
            query: { bool: { must: [] } }
          }
        },
        size: 10000
        // We use a very large number for size, because we can't paginate nested documents
        // and in practice there could hardly be so many records to be returned.(also consider we are using filters in the meantime)
        // the number is limited by `index.max_result_window`, its default value is 10000, see
        // https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules.html#index-max-result-window
      }
    }
    // translate the supported filters into nested term queries
    _.each(_.pick(criteria, ['status', 'workPeriodId']), (value, key) => {
      esQuery.body.query.nested.query.bool.must.push({
        term: {
          [`workPeriods.payments.${key}`]: {
            value
          }
        }
      })
    })
    if (criteria.workPeriodIds) {
      esQuery.body.query.nested.query.bool.filter = [{
        terms: {
          'workPeriods.payments.workPeriodId': criteria.workPeriodIds
        }
      }]
    }
    logger.debug({ component: 'WorkPeriodPaymentService', context: 'searchWorkPeriodPayment', message: `Query: ${JSON.stringify(esQuery)}` })
    const { body } = await esClient.search(esQuery)
    // flatten resourceBooking -> workPeriods -> payments into one payments array
    const workPeriods = _.reduce(body.hits.hits, (acc, resourceBooking) => _.concat(acc, resourceBooking._source.workPeriods), [])
    let payments = _.reduce(workPeriods, (acc, workPeriod) => _.concat(acc, workPeriod.payments), [])
    // the nested query matches whole resource bookings, so the filters must be
    // re-applied in memory to the flattened payment list
    if (criteria.workPeriodId) {
      payments = _.filter(payments, { workPeriodId: criteria.workPeriodId })
    } else if (criteria.workPeriodIds) {
      payments = _.filter(payments, p => _.includes(criteria.workPeriodIds, p.workPeriodId))
    }
    if (criteria.status) {
      payments = _.filter(payments, { status: criteria.status })
    }
    // sorting and pagination are done in memory for the same reason
    payments = _.sortBy(payments, [criteria.sortBy])
    if (criteria.sortOrder === 'desc') {
      payments = _.reverse(payments)
    }
    const total = payments.length
    if (!options.returnAll) {
      payments = _.slice(payments, (page - 1) * perPage, page * perPage)
    }
    return {
      total,
      page,
      perPage,
      result: payments
    }
  } catch (err) {
    // intentional: log the ES failure and fall through to the DB query below
    // NOTE(review): context 'searchWorkPeriodPaymentService' differs from the
    // function name used elsewhere ('searchWorkPeriodPayments') — confirm intended
    logger.logFullError(err, { component: 'WorkPeriodPaymentService', context: 'searchWorkPeriodPaymentService' })
  }
  logger.info({ component: 'WorkPeriodPaymentService', context: 'searchWorkPeriodPayments', message: 'fallback to DB query' })
  // rebuild the same filters as a Sequelize where-clause
  const filter = { [Op.and]: [] }
  _.each(_.pick(criteria, ['status', 'workPeriodId']), (value, key) => {
    filter[Op.and].push({ [key]: value })
  })
  if (criteria.workPeriodIds) {
    filter[Op.and].push({ workPeriodId: criteria.workPeriodIds })
  }
  const workPeriodPayments = await WorkPeriodPayment.findAll({
    where: filter,
    offset: ((page - 1) * perPage),
    limit: perPage,
    order: [[criteria.sortBy, criteria.sortOrder]]
  })
  const total = await WorkPeriodPayment.count({ where: filter })
  // `fromDb: true` signals callers that the result came from the fallback path
  return {
    fromDb: true,
    total,
    page,
    perPage,
    result: workPeriodPayments
  }
}
// Argument schema for searchWorkPeriodPayments(currentUser, criteria, options).
// `workPeriodIds` may be a comma-separated string (normalized by the function)
// or an array of uuids.
searchWorkPeriodPayments.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  criteria: Joi.object().keys({
    page: Joi.number().integer().min(1).default(1),
    perPage: Joi.number().integer().min(1).max(10000).default(20),
    sortBy: Joi.string().valid('status', 'amount', 'createdAt', 'updatedAt').default('createdAt'),
    sortOrder: Joi.string().valid('desc', 'asc').default('desc'),
    status: Joi.workPeriodPaymentStatus(),
    workPeriodId: Joi.string().uuid(),
    workPeriodIds: Joi.alternatives(
      Joi.string(),
      Joi.array().items(Joi.string().uuid())
    )
  }).required(),
  options: Joi.object()
}).required()
/**
 * Create all query workPeriodPayments.
 *
 * Finds work periods via a resource-booking search driven by `criteria.query`,
 * then creates one payment per matching work period. Individual failures are
 * counted, not rethrown.
 *
 * @param {Object} currentUser the user who perform this operation.
 * @param {Object} criteria the query criteria
 * @returns {Object} the process result ({ total, query, totalSuccess, totalError })
 */
async function createQueryWorkPeriodPayments (currentUser, criteria) {
  // check permission
  _checkUserPermissionForCRUWorkPeriodPayment(currentUser)
  // Joi validation normalizes the dates back to ISO format
  // so, we need to change the date format back to YYYY-MM-DD
  if (criteria.query.startDate) {
    criteria.query.startDate = moment(criteria.query.startDate).format('YYYY-MM-DD')
  }
  if (criteria.query.endDate) {
    criteria.query.endDate = moment(criteria.query.endDate).format('YYYY-MM-DD')
  }
  if (criteria.query['workPeriods.startDate']) {
    criteria.query['workPeriods.startDate'] = moment(criteria.query['workPeriods.startDate']).format('YYYY-MM-DD')
  }
  if (criteria.query['workPeriods.endDate']) {
    criteria.query['workPeriods.endDate'] = moment(criteria.query['workPeriods.endDate']).format('YYYY-MM-DD')
  }
  // save query to return back
  const rawQuery = _.cloneDeep(criteria.query)
  const createdBy = await helper.getUserId(currentUser.userId)
  const query = criteria.query
  // paymentStatus may arrive as a comma-separated string; split and validate each value
  if ((typeof query['workPeriods.paymentStatus']) === 'string') {
    query['workPeriods.paymentStatus'] = query['workPeriods.paymentStatus'].trim().split(',').map(ps => Joi.attempt({ paymentStatus: ps.trim() }, Joi.object().keys({ paymentStatus: Joi.paymentStatus() })).paymentStatus)
  }
  // request only the fields needed to create payments, plus whatever the query filters on
  const fields = _.join(_.uniq(_.concat(
    ['id', 'billingAccountId', 'memberRate', 'customerRate', 'workPeriods.id', 'workPeriods.resourceBookingId', 'workPeriods.daysWorked', 'workPeriods.daysPaid'],
    _.map(_.keys(query), k => k === 'projectIds' ? 'projectId' : k))
  ), ',')
  const searchResult = await searchResourceBookings(currentUser, _.extend({ fields, page: 1 }, query), { returnAll: true, returnFromDB: true })
  // flatten all work periods and index their resource bookings by id for fast lookup
  const wpArray = _.flatMap(searchResult.result, 'workPeriods')
  const resourceBookingMap = _.fromPairs(_.map(searchResult.result, rb => [rb.id, rb]))
  const result = { total: wpArray.length, query: rawQuery, totalSuccess: 0, totalError: 0 }
  for (const wp of wpArray) {
    try {
      await _createSingleWorkPeriodPaymentWithWorkPeriodAndResourceBooking({ workPeriodId: wp.id }, createdBy, wp, resourceBookingMap[wp.resourceBookingId])
      result.totalSuccess++
    } catch (err) {
      // best-effort: one failed payment does not stop the batch
      logger.logFullError(err, { component: 'WorkPeriodPaymentService', context: 'createQueryWorkPeriodPayments' })
      result.totalError++
    }
  }
  return result
}
// Argument schema for createQueryWorkPeriodPayments(currentUser, criteria).
// `criteria.query` mirrors the resource-booking search filters.
createQueryWorkPeriodPayments.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  criteria: Joi.object().keys({
    query: Joi.object().keys({
      status: Joi.resourceBookingStatus(),
      startDate: Joi.date().format('YYYY-MM-DD'),
      endDate: Joi.date().format('YYYY-MM-DD'),
      rateType: Joi.rateType(),
      jobId: Joi.string().uuid(),
      userId: Joi.string().uuid(),
      projectId: Joi.number().integer(),
      // projectIds: comma-separated string or array of integers
      projectIds: Joi.alternatives(
        Joi.string(),
        Joi.array().items(Joi.number().integer())
      ),
      // paymentStatus: comma-separated string or array of valid statuses
      'workPeriods.paymentStatus': Joi.alternatives(
        Joi.string(),
        Joi.array().items(Joi.string().valid(Joi.paymentStatus()))
      ),
      'workPeriods.startDate': Joi.date().format('YYYY-MM-DD'),
      'workPeriods.endDate': Joi.date().format('YYYY-MM-DD'),
      'workPeriods.userHandle': Joi.string()
    }).required()
  }).required()
}).required()
// Public API of the WorkPeriodPayment service.
module.exports = {
  getWorkPeriodPayment,
  createWorkPeriodPayment,
  createBulkOfWorkPeriodPayments,
  createQueryWorkPeriodPayments,
  partiallyUpdateWorkPeriodPayment,
  updateBulkOfWorkPeriodPayments,
  searchWorkPeriodPayments
}
<file_sep>const config = require('config')
/*
 * Make ResourceBooking fields startDate, endDate, memberRate and customerRate optional.
 */
// Physical (snake_case) column names affected by this migration.
const targetFields = ['start_date', 'end_date', 'member_rate', 'customer_rate']
// NOTE(review): a second `module.exports = ...` later in this file reassigns the
// export, so this raw-SQL version is shadowed — confirm which version should ship.
module.exports = {
  // drop the NOT NULL constraint on each target column
  up: queryInterface => {
    return Promise.all(targetFields.map(field =>
      queryInterface.sequelize.query(`ALTER TABLE bookings.resource_bookings ALTER COLUMN ${field} DROP NOT NULL`)
    ))
  },
  // restore the NOT NULL constraint on each target column
  down: queryInterface => {
    return Promise.all(targetFields.map(field =>
      queryInterface.sequelize.query(`ALTER TABLE bookings.resource_bookings ALTER COLUMN ${field} SET NOT NULL`)
    ))
  }
}
module.exports = {
up: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
try {
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'start_date',
{ type: Sequelize.DATE, allowNull: true },
{ transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'end_date',
{ type: Sequelize.DATE, allowNull: true }
, { transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'member_rate',
{ type: Sequelize.FLOAT, allowNull: true }
, { transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'customer_rate',
{ type: Sequelize.FLOAT, allowNull: true }
, { transaction })
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
},
down: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
try {
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'start_date',
{ type: Sequelize.DATE, allowNull: false },
{ transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'end_date',
{ type: Sequelize.DATE, allowNull: false }
, { transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'member_rate',
{ type: Sequelize.FLOAT, allowNull: false }
, { transaction })
await queryInterface.changeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'customer_rate',
{ type: Sequelize.FLOAT, allowNull: false }
, { transaction })
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
}
}
<file_sep>/**
* Reindex all data in Elasticsearch using data from database
*/
const config = require('config')
const { Interview, WorkPeriod, WorkPeriodPayment } = require('../../src/models')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// Confirmation prompt shown before the destructive full reindex.
const userPrompt = 'WARNING: this would remove existent data! Are you sure want to reindex all indices?'
// JobCandidate documents are indexed together with their nested interviews.
const jobCandidateModelOpts = {
  modelName: 'JobCandidate',
  include: [{
    model: Interview,
    as: 'interviews'
  }]
}
// ResourceBooking documents embed workPeriods, which in turn embed payments.
const resourceBookingModelOpts = {
  modelName: 'ResourceBooking',
  include: [{
    model: WorkPeriod,
    as: 'workPeriods',
    include: [{
      model: WorkPeriodPayment,
      as: 'payments'
    }]
  }]
}
/**
 * Rebuild every Elasticsearch index from the database, after the user confirms
 * the destructive prompt. Exits the process with 0 on success, 1 on failure.
 */
async function indexAll () {
  await helper.promptUser(userPrompt, async () => {
    try {
      // model/index pairs, reindexed sequentially in this order
      const indexingJobs = [
        ['Job', 'esConfig.ES_INDEX_JOB'],
        [jobCandidateModelOpts, 'esConfig.ES_INDEX_JOB_CANDIDATE'],
        [resourceBookingModelOpts, 'esConfig.ES_INDEX_RESOURCE_BOOKING'],
        ['Role', 'esConfig.ES_INDEX_ROLE']
      ]
      for (const [model, configKey] of indexingJobs) {
        await helper.indexBulkDataToES(model, config.get(configKey), logger)
      }
      process.exit(0)
    } catch (err) {
      logger.logFullError(err, { component: 'indexAll' })
      process.exit(1)
    }
  })
}
indexAll()
<file_sep>/**
* Reindex Jobs data in Elasticsearch using data from database
*/
const config = require('config')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// Optional job id taken from the command line; null means "reindex all jobs".
const jobId = helper.getParamFromCliArgs()
const index = config.get('esConfig.ES_INDEX_JOB')
// Confirmation prompts for the two modes (full reindex vs single document).
const reIndexAllJobsPrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the index ${index}?`
const reIndexJobPrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the document with id ${jobId} in index ${index}?`
/**
 * Reindex Job documents in Elasticsearch: either every job, or a single one
 * when a job id was supplied on the command line. Exits the process with 0 on
 * success, 1 on failure.
 */
async function reIndexJobs () {
  const reindexingAll = jobId === null
  const prompt = reindexingAll ? reIndexAllJobsPrompt : reIndexJobPrompt
  await helper.promptUser(prompt, async () => {
    try {
      if (reindexingAll) {
        await helper.indexBulkDataToES('Job', index, logger)
      } else {
        await helper.indexDataToEsById(jobId, 'Job', index, logger)
      }
      process.exit(0)
    } catch (err) {
      logger.logFullError(err, { component: 'reIndexJobs' })
      process.exit(1)
    }
  })
}
reIndexJobs()
<file_sep>/**
* This service provides operations of Job.
*/
const _ = require('lodash')
const Joi = require('joi')
const dateFNS = require('date-fns')
const config = require('config')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const eventDispatcher = require('../common/eventDispatcher')
const JobService = require('./JobService')
const ResourceBookingService = require('./ResourceBookingService')
const HttpStatus = require('http-status-codes')
const { Op } = require('sequelize')
const models = require('../models')
const stopWords = require('../../data/stopWords.json')
const { getAuditM2Muser } = require('../common/helper')
const { matchedSkills, unMatchedSkills } = require('../../scripts/emsi-mapping/esmi-skills-mapping')
const Role = models.Role
const RoleSearchRequest = models.RoleSearchRequest
// Stripe client; retries transient network failures up to 5 times.
const stripe = require('stripe')(config.STRIPE_SECRET_KEY, { maxNetworkRetries: 5 })
// Email templates used by the team-related notification endpoints.
const emailTemplates = helper.getEmailTemplatesForKey('teamTemplates')
/**
 * Fetch every resource booking with status "placed" for the given projects.
 * @param {Object} currentUser the user who performs this operation
 * @param {Array} projectIds project ids
 * @returns {Array} the matching resource bookings
 */
async function _getPlacedResourceBookingsByProjectIds (currentUser, projectIds) {
  const searchResult = await ResourceBookingService.searchResourceBookings(
    currentUser,
    { status: 'placed', projectIds },
    { returnAll: true }
  )
  return searchResult.result
}
/**
 * Fetch every job belonging to the given projects.
 * @param {Object} currentUser the user who performs this operation
 * @param {Array} projectIds project ids
 * @returns {Array} the matching jobs
 */
async function _getJobsByProjectIds (currentUser, projectIds) {
  const searchResult = await JobService.searchJobs(
    currentUser,
    { projectIds },
    { returnAll: true }
  )
  return searchResult.result
}
/**
 * Compile regex patterns (Levenshtein distance <= 1) for the skills EMSI could
 * not match.
 * @returns {Array} objects holding each lower-cased skill name and its compiled pattern
 */
function compileRegexPatternForNoEmsiSkills () {
  // pair every unmatched skill with its fuzzy-match pattern
  return _.map(unMatchedSkills, skill => {
    const lowered = skill.toLowerCase()
    return {
      name: lowered,
      pattern: _compileRegexPatternForSkillName(lowered)
    }
  })
}
/**
 * Prepares the regex pattern for the given word according to a Levenshtein
 * distance of 1 (transpositions, insertions, deletions or substitutions).
 * @param {String} skillName the name of the skill
 * @returns {RegExp} the compiled, case-insensitive regex pattern
 */
function _compileRegexPatternForSkillName (skillName) {
  // split into characters and escape any regex metacharacters
  const chars = skillName.split('').map(ch => ch.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
  const exact = chars.join('')
  // Its not a good idea to apply tolerance according to
  // Levenshtein distance for the words have less than 3 letters
  // We expect the skill names have 1 or 2 letters to take place
  // in job description as how they are exactly spelled
  if (chars.length < 3) {
    return new RegExp(`^(?:${exact})$`, 'i')
  }
  // start with the skill name itself
  const alternatives = [exact]
  // every adjacent transposition, e.g. java => ajva, jvaa, jaav
  for (let i = 0; i < chars.length - 1; i++) {
    alternatives.push(chars.slice(0, i).join('') + chars[i + 1] + chars[i] + chars.slice(i + 2).join(''))
  }
  // every single insertion, e.g. java => .java, j.ava, ja.va, jav.a, java.
  for (let i = 0; i <= chars.length; i++) {
    alternatives.push(chars.slice(0, i).join('') + '.' + chars.slice(i).join(''))
  }
  // every single deletion/substitution, e.g. java => .?ava, j.?va, ja.?a, jav.?
  for (let i = 0; i < chars.length; i++) {
    alternatives.push(chars.slice(0, i).join('') + '.?' + chars.slice(i + 1).join(''))
  }
  // anchor the whole alternation so only complete-word matches count
  return new RegExp(`^(?:${alternatives.join('|')})$`, 'i')
}
/**
 * List teams by proxying the project search endpoint and enriching each
 * project with team details.
 * @param {Object} currentUser the user who performs this operation
 * @param {Object} criteria the search criteria
 * @returns {Object} the search result, containing total/page/perPage and result array
 */
async function searchTeams (currentUser, criteria) {
  // Query /v5/projects with the caller's paging, name filter and sort
  const projectSearch = await helper.getProjects(currentUser, {
    page: criteria.page,
    perPage: criteria.perPage,
    name: criteria.name,
    sort: `${criteria.sortBy} ${criteria.sortOrder}`
  })
  const teams = await getTeamDetail(currentUser, projectSearch.result)
  return {
    total: projectSearch.total,
    page: projectSearch.page,
    perPage: projectSearch.perPage,
    result: teams
  }
}
// Argument schema for searchTeams(currentUser, criteria).
// `sortOrder` is forbidden when sorting by 'best match'.
searchTeams.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    criteria: Joi.object()
      .keys({
        page: Joi.page(),
        perPage: Joi.perPage(),
        sortBy: Joi.string()
          .valid(
            'createdAt',
            'updatedAt',
            'lastActivityAt',
            'id',
            'status',
            'name',
            'type',
            'best match'
          )
          .default('lastActivityAt'),
        sortOrder: Joi.when('sortBy', {
          is: 'best match',
          then: Joi.forbidden().label(
            'sortOrder(with sortBy being `best match`)'
          ),
          otherwise: Joi.string().valid('asc', 'desc').default('desc')
        }),
        name: Joi.string()
      })
      .required()
  })
  .required()
/**
 * Get team details.
 *
 * Enriches each project with: placed resources (user info + photo), the min
 * start / max end dates across its bookings, the current-week cost, and either
 * a total open position count (search mode) or the job list (detail mode).
 *
 * @param {Object} currentUser the user who perform this operation
 * @param {Object} projects the projects
 * @param {Object} isSearch the flag whether for search function
 * @returns {Object} the search result
 */
async function getTeamDetail (currentUser, projects, isSearch = true) {
  const projectIds = _.map(projects, 'id')
  // Get all placed resourceBookings filtered by projectIds
  const resourceBookings = await _getPlacedResourceBookingsByProjectIds(
    currentUser,
    projectIds
  )
  // Get all jobs filtered by projectIds
  const jobs = await _getJobsByProjectIds(currentUser, projectIds)
  // Get first week day and last week day
  const curr = new Date()
  const firstDay = dateFNS.startOfWeek(curr)
  const lastDay = dateFNS.endOfWeek(curr)
  logger.debug({
    component: 'TeamService',
    context: 'getTeamDetail',
    message: `week started: ${firstDay}, week ended: ${lastDay}`
  })
  const result = []
  for (const project of projects) {
    const rbs = _.filter(resourceBookings, { projectId: project.id })
    const res = _.clone(project)
    res.weeklyCost = 0
    res.resources = []
    if (rbs && rbs.length > 0) {
      // Get minimal start date and maximal end date
      const startDates = []
      const endDates = []
      for (const rbsItem of rbs) {
        if (rbsItem.startDate) {
          startDates.push(new Date(rbsItem.startDate))
        }
        if (rbsItem.endDate) {
          endDates.push(new Date(rbsItem.endDate))
        }
      }
      if (startDates && startDates.length > 0) {
        res.startDate = _.min(startDates)
      }
      if (endDates && endDates.length > 0) {
        res.endDate = _.max(endDates)
      }
      // Count weekly rate: sum customerRate of bookings overlapping the current week
      for (const item of rbs) {
        // ignore any resourceBooking that has customerRate missed
        if (!item.customerRate) {
          continue
        }
        const startDate = new Date(item.startDate)
        const endDate = new Date(item.endDate)
        // normally startDate is smaller than endDate for a resourceBooking so not check if startDate < endDate
        if (
          (!item.startDate || startDate < lastDay) &&
          (!item.endDate || endDate > firstDay)
        ) {
          res.weeklyCost += item.customerRate
        }
      }
      // fetch user details for every booking in parallel
      const resourceInfos = await Promise.all(
        _.map(rbs, (rb) => {
          return helper.getUserById(rb.userId, true).then((user) => {
            const resource = {
              id: rb.id,
              userId: user.id,
              ..._.pick(user, ['handle', 'firstName', 'lastName', 'skills'])
            }
            // If call function is not search, add jobId field
            if (!isSearch) {
              resource.jobId = rb.jobId
              resource.customerRate = rb.customerRate
              resource.startDate = rb.startDate
              resource.endDate = rb.endDate
            }
            return resource
          })
        })
      )
      if (resourceInfos && resourceInfos.length > 0) {
        res.resources = resourceInfos
        const userHandles = _.map(resourceInfos, 'handle')
        // Get user photo from /v5/members
        const members = await helper.getMembers(userHandles)
        for (const item of res.resources) {
          // members are matched case-insensitively via handleLower
          const findMember = _.find(members, {
            handleLower: item.handle.toLowerCase()
          })
          if (findMember && findMember.photoURL) {
            item.photo_url = findMember.photoURL
          }
        }
      }
    }
    const jobsTmp = _.filter(jobs, { projectId: project.id })
    if (jobsTmp && jobsTmp.length > 0) {
      if (isSearch) {
        // Count total positions
        res.totalPositions = 0
        for (const item of jobsTmp) {
          // only sum numPositions of jobs whose status is NOT cancelled or closed
          if (['cancelled', 'closed'].includes(item.status)) {
            continue
          }
          res.totalPositions += item.numPositions
        }
      } else {
        // detail mode: return a trimmed copy of each job instead of a count
        res.jobs = _.map(jobsTmp, (job) => {
          return _.pick(job, [
            'id',
            'description',
            'startDate',
            'duration',
            'numPositions',
            'rateType',
            'skills',
            'customerRate',
            'status',
            'title'
          ])
        })
      }
    }
    result.push(res)
  }
  return result
}
/**
 * Get a team by project id, in detail mode, with each job's skill ids resolved
 * to full skill objects.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the project (team) id
 * @returns {Object} the team detail
 */
async function getTeam (currentUser, id) {
  const project = await helper.getProjectById(currentUser, id)
  const [teamDetail] = await getTeamDetail(currentUser, [project], false)
  // resolve each job's skill ids into full skill objects
  if (teamDetail && teamDetail.jobs) {
    for (const job of teamDetail.jobs) {
      if (job.skills) {
        job.skills = await Promise.all(
          job.skills.map((skillId) => helper.getSkillById(skillId))
        )
      }
    }
  }
  return teamDetail
}
// Argument schema for getTeam(currentUser, id).
getTeam.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    id: Joi.number().integer().required()
  })
  .required()
/**
 * Get team job with id.
 *
 * Returns the job's id/title, its skills resolved to full skill objects, and —
 * when the job has candidates — each candidate enriched with user data and a
 * member photo URL.
 *
 * @param {Object} currentUser the user who perform this operation
 * @param {String} id the team id
 * @param {String} jobId the job id
 * @returns the team job
 */
async function getTeamJob (currentUser, id, jobId) {
  const project = await helper.getProjectById(currentUser, id)
  const jobs = await _getJobsByProjectIds(currentUser, [project.id])
  const job = _.find(jobs, { id: jobId })
  if (!job) {
    throw new errors.NotFoundError(
      `id: ${jobId} "Job" with Team id ${id} doesn't exist`
    )
  }
  const result = {
    id: job.id,
    title: job.title
  }
  if (job.skills) {
    // resolve skill ids to full skill objects, in parallel
    result.skills = await Promise.all(
      _.map(job.skills, (skillId) => helper.getSkillById(skillId))
    )
  }
  // If the job has candidates, the following data for each candidate would be populated:
  //
  // - the `status`, `resume`, `userId` and `id` of the candidate
  // - the `handle`, `firstName` `lastName` and `skills` of the user(from GET /users/:userId) for the candidate
  // - the `photoURL` of the member(from GET /members) for the candidate
  //
  if (job && job.candidates && job.candidates.length > 0) {
    // find user data for candidates (deduplicated by userId)
    const users = await Promise.all(
      _.map(_.uniq(_.map(job.candidates, 'userId')), (userId) =>
        helper.getUserById(userId, true)
      )
    )
    const userMap = _.groupBy(users, 'id')
    // find photo URLs for users
    const members = await helper.getMembers(_.map(users, 'handle'))
    const photoURLMap = _.groupBy(members, 'handleLower')
    result.candidates = _.map(job.candidates, (candidate) => {
      const candidateData = _.pick(candidate, [
        'status',
        'resume',
        'userId',
        'interviews',
        'id'
      ])
      const userData = userMap[candidate.userId][0]
      // attach user data to the candidate
      Object.assign(
        candidateData,
        _.pick(userData, ['handle', 'firstName', 'lastName', 'skills'])
      )
      // attach photo URL to the candidate
      const handleLower = userData.handle.toLowerCase()
      if (photoURLMap[handleLower]) {
        candidateData.photo_url = photoURLMap[handleLower][0].photoURL
      }
      return candidateData
    })
  }
  return result
}
// Argument schema for getTeamJob(currentUser, id, jobId).
getTeamJob.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    id: Joi.number().integer().required(),
    jobId: Joi.string().guid().required()
  })
  .required()
/**
 * Send an email through a named template, merging request data over the
 * template's defaults and posting the result to the email Kafka topic.
 * @param {Object} currentUser the user who performs this operation
 * @param {Object} data the email payload (template name, overrides, substitution data)
 * @returns {undefined}
 */
async function sendEmail (currentUser, data) {
  const template = emailTemplates[data.template]
  // fill placeholders in subject/body, preferring request values over template defaults
  const subject = helper.substituteStringByObject(
    data.subject || template.subject,
    data.data
  )
  const body = helper.substituteStringByObject(
    data.body || template.body,
    data.data
  )
  // merge recipients/CCs from both the request and the template, deduplicated
  const recipients = _.uniq([...(data.recipients || []), ...(template.recipients || [])])
  const cc = _.uniq([...(data.cc || []), ...(template.cc || [])])
  const emailData = {
    // override template if coming data already have the 'from' address
    from: data.from || template.from,
    recipients,
    cc,
    data: { ...data.data, subject, body },
    sendgrid_template_id: template.sendgridTemplateId,
    version: 'v3'
  }
  await helper.postEvent(config.EMAIL_TOPIC, emailData)
}
// Argument schema for sendEmail(currentUser, data).
// `template` must be one of the configured team email templates.
sendEmail.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    data: Joi.object()
      .keys({
        template: Joi.string()
          .valid(...Object.keys(emailTemplates))
          .required(),
        data: Joi.object().required(),
        from: Joi.string().email(),
        recipients: Joi.array().items(Joi.string().email()).allow(null),
        cc: Joi.array().items(Joi.string().email()).allow(null)
      })
      .required()
  })
  .required()
/**
 * Add a member to a team as customer.
 * @param {Number} projectId project id
 * @param {String} userId user id
 * @param {String} fields the fields to be returned
 * @returns {Object} the member added
 * @throws {Error} 'User is already added' when the project API reports the
 *   user as already registered; rethrows other failures after logging
 */
async function _addMemberToProjectAsCustomer (projectId, userId, fields) {
  try {
    return await helper.createProjectMember(
      projectId,
      { userId: userId, role: 'customer' },
      { fields }
    )
  } catch (err) {
    // prefer the upstream API's message when one is present
    err.message = _.get(err, 'response.body.message') || err.message
    if (err.message && err.message.includes('User already registered')) {
      throw new Error('User is already added')
    }
    logger.error({
      component: 'TeamService',
      context: '_addMemberToProjectAsCustomer',
      message: err.message
    })
    throw err
  }
}
/**
 * Add members to a team by handle or email.
 *
 * Handles and emails are resolved to member records up front; each resolved
 * member is then added to the project in parallel. Per-item failures land in
 * `result.failed` instead of aborting the batch.
 *
 * @param {Object} currentUser the user who perform this operation
 * @param {String} id the team id
 * @params {Object} criteria the search criteria
 * @param {Object} data the object including members with handle/email to be added
 * @returns {Object} the success/failed added members
 */
async function addMembers (currentUser, id, criteria, data) {
  await helper.getProjectById(currentUser, id) // check whether the user can access the project
  const result = {
    success: [],
    failed: []
  }
  const handles = data.handles || []
  const emails = data.emails || []
  const handleMembers = await helper
    .getMemberDetailsByHandles(handles)
    .then((members) =>
      _.map(members, (member) => ({
        ...member,
        // populate members with lower-cased handle for case insensitive search
        handleLowerCase: member.handle.toLowerCase()
      }))
    )
  const emailMembers = await helper
    .getMemberDetailsByEmails(emails)
    .then((members) =>
      _.map(members, (member) => ({
        ...member,
        // populate members with lower-cased email for case insensitive search
        emailLowerCase: member.email.toLowerCase()
      }))
    )
  // add all handle-based and email-based members in parallel
  await Promise.all([
    Promise.all(
      handles.map((handle) => {
        const memberDetails = _.find(handleMembers, {
          handleLowerCase: handle.toLowerCase()
        })
        if (!memberDetails) {
          result.failed.push({ error: "User doesn't exist", handle })
          return
        }
        // NOTE(review): the handle lookup uses `.userId` while the email lookup
        // below uses `.id` — presumably the two member APIs return different
        // shapes; confirm against helper.getMemberDetailsByHandles/ByEmails
        return _addMemberToProjectAsCustomer(
          id,
          memberDetails.userId,
          criteria.fields
        )
          .then((member) => {
            // note, that we return `handle` in the same case it was in request
            result.success.push({ ...member, handle })
          })
          .catch((err) => {
            result.failed.push({ error: err.message, handle })
          })
      })
    ),
    Promise.all(
      emails.map((email) => {
        const memberDetails = _.find(emailMembers, {
          emailLowerCase: email.toLowerCase()
        })
        if (!memberDetails) {
          result.failed.push({ error: "User doesn't exist", email })
          return
        }
        return _addMemberToProjectAsCustomer(
          id,
          memberDetails.id,
          criteria.fields
        )
          .then((member) => {
            // note, that we return `email` in the same case it was in request
            result.success.push({ ...member, email })
          })
          .catch((err) => {
            result.failed.push({ error: err.message, email })
          })
      })
    )
  ])
  return result
}
// Argument schema for addMembers(currentUser, id, criteria, data).
// `data` must contain at least one of `handles` / `emails`.
addMembers.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    id: Joi.number().integer().required(),
    criteria: Joi.object()
      .keys({
        fields: Joi.string()
      })
      .required(),
    data: Joi.object()
      .keys({
        handles: Joi.array().items(Joi.string()),
        emails: Joi.array().items(Joi.string().email())
      })
      .or('handles', 'emails')
      .required()
  })
  .required()
/**
 * Search members in a team.
 * Serves as a proxy endpoint for `GET /projects/{projectId}/members`.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the team id
 * @params {Object} criteria the search criteria
 * @returns {Object} the search result
 */
async function searchMembers (currentUser, id, criteria) {
  return {
    result: await helper.listProjectMembers(currentUser, id, criteria)
  }
}
// Argument schema for searchMembers(currentUser, id, criteria).
searchMembers.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    id: Joi.number().integer().required(),
    criteria: Joi.object()
      .keys({
        role: Joi.string(),
        fields: Joi.string()
      })
      .required()
  })
  .required()
/**
 * Search member invites for a team.
 * Serves as a proxy endpoint for `GET /projects/{projectId}/invites`.
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the team id
 * @params {Object} criteria the search criteria
 * @returns {Object} the search result
 */
async function searchInvites (currentUser, id, criteria) {
  return {
    result: await helper.listProjectMemberInvites(currentUser, id, criteria)
  }
}
searchInvites.schema = Joi.object()
.keys({
currentUser: Joi.object().required(),
id: Joi.number().integer().required(),
criteria: Joi.object()
.keys({
fields: Joi.string()
})
.required()
})
.required()
/**
 * Remove a member from a team.
 * Proxies `DELETE /projects/{projectId}/members/{id}`.
 *
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the team id
 * @param {String} projectMemberId the id of the project member
 * @returns {undefined}
 */
async function deleteMember (currentUser, id, projectMemberId) {
  await helper.deleteProjectMember(currentUser, id, projectMemberId)
}

deleteMember.schema = Joi.object({
  currentUser: Joi.object().required(),
  id: Joi.number().integer().required(),
  projectMemberId: Joi.number().integer().required()
}).required()
/**
 * Return details about the current user.
 *
 * @param {Object} currentUser the user who performs this operation
 * @returns {Object} the user data for the current user
 */
async function getMe (currentUser) {
  const { userId } = currentUser
  return helper.getUserByExternalId(userId)
}

getMe.schema = Joi.object({
  currentUser: Joi.object().required()
}).required()
/**
 * Searches roles either by roleId, skills or jobDescription.
 *
 * Resolves the best matching Role, records a RoleSearchRequest entity
 * for it, and returns the (cleaned) Role merged with the request id.
 *
 * @param {Object} currentUser the user performing the operation (may be undefined)
 * @param {Object} data search request data (roleId | skills | jobDescription)
 * @returns {Object} the matched role with roleSearchRequestId (and jobTitle when given)
 */
async function roleSearchRequest (currentUser, data) {
  // anonymous callers are treated as the machine (M2M) audit user
  if (_.isUndefined(currentUser)) {
    currentUser = getAuditM2Muser()
  }
  let role
  if (!_.isUndefined(data.roleId)) {
    // an explicit roleId was provided - load that role directly
    role = (await Role.findById(data.roleId)).toJSON()
    role.matchedSkills = role.listOfSkills
    role.unMatchedSkills = []
    role.skillsMatch = 1
  } else if (!_.isUndefined(data.skills)) {
    // skill ids provided - validate them, convert to names, then match a role
    const skillNames = await getSkillNamesByIds(data.skills)
    role = await getRoleBySkills(skillNames)
  } else {
    // only a job description - extract skill names from the text first
    const parsedTags = await getSkillsByJobDescription({ description: data.jobDescription })
    const skillNames = _.map(parsedTags, 'tag')
    // resolve skill ids (stored on the request) and the best role in parallel
    const [resolvedSkillIds, matchedRole] = await Promise.all([
      getSkillIdsByNames(skillNames),
      getRoleBySkills(skillNames)
    ])
    data.skills = resolvedSkillIds
    role = matchedRole
  }
  data.roleId = role.id
  // persist the search request pointing at the resolved role
  const { id: roleSearchRequestId, jobTitle } = await createRoleSearchRequest(currentUser, data)
  const extra = jobTitle ? { jobTitle, roleSearchRequestId } : { roleSearchRequestId }
  // strip internal-only fields for external members before returning
  role = await _cleanRoleDTO(currentUser, role)
  return _.assign(role, extra)
}

roleSearchRequest.schema = Joi.object({
  currentUser: Joi.object(),
  data: Joi.object({
    roleId: Joi.string().uuid(),
    jobDescription: Joi.string().max(100000),
    skills: Joi.array().items(Joi.string().uuid().required()),
    jobTitle: Joi.string().max(100),
    previousRoleSearchRequestId: Joi.string().uuid()
  }).required().or('roleId', 'jobDescription', 'skills')
}).required()
/**
 * Returns 1 role most relevant to the specified skills.
 *
 * Roles overlapping any given skill are scored by the fraction of given
 * skills they match; if the best score reaches `config.ROLE_MATCHING_RATE`
 * it wins, otherwise the "Custom" role (or an empty object) is returned.
 *
 * @param {Array<string>} skills the array of skill names
 * @returns {Role} the best matching Role
 */
async function getRoleBySkills (skills) {
  // find all roles which include any of the given skills
  logger.debug(`getRoleBySkills: ${JSON.stringify(skills)}`)
  const queryCriteria = {
    where: { listOfSkills: { [Op.overlap]: skills } },
    raw: true
  }
  let roles = await Role.findAll(queryCriteria)
  logger.debug(`find roles: ${JSON.stringify(roles)}`)
  // only consider roles that have a complete set of global rates
  roles = _.filter(roles, role => _.find(role.rates, r => r.global && r.rate20Global && r.rate30Global))
  if (roles.length > 0) {
    let result = _.each(roles, role => {
      // role matched skills list
      role.matchedSkills = _.intersection(role.listOfSkills, skills)
      role.unMatchedSkills = _.difference(skills, role.matchedSkills)
      // calculate each found role's matching rate
      role.skillsMatch = role.matchedSkills.length / skills.length
      // each role can have multiple rates, get the maximum of global rates
      role.maxGlobal = _.maxBy(role.rates, 'global').global
    })
    // sort roles by skillsMatch, global rate and name
    result = _.orderBy(result, ['skillsMatch', 'maxGlobal', 'name'], ['desc', 'desc', 'asc'])
    logger.debug(`after sorting result: ${JSON.stringify(result)}`)
    if (result[0].skillsMatch >= config.ROLE_MATCHING_RATE) {
      // return the 1st role
      return _.omit(result[0], ['maxGlobal'])
    }
  }
  // if no matching role found then return Custom role or empty object
  const customRole = await Role.findOne({ where: { name: { [Op.iLike]: 'Custom' } }, raw: true }) || {}
  logger.debug(`got custom role: ${JSON.stringify(customRole)}`)
  // BUGFIX: the `|| {}` fallback above means `customRole.rates` may be
  // undefined (no "Custom" role in DB) - guard before dereferencing,
  // otherwise this threw a TypeError instead of returning the fallback.
  if (_.get(customRole, 'rates.length', 0) > 0) {
    customRole.rates[0].rate30Global = customRole.rates[0].global * 0.75
    customRole.rates[0].rate20Global = customRole.rates[0].global * 0.5
  }
  return customRole
}
getRoleBySkills.schema = Joi.object()
  .keys({
    skills: Joi.array().items(Joi.string().required()).required()
  }).required()
/**
 * Return skills extracted from a job description.
 *
 * Combines two strategies: EMSI-parsed tags mapped to known topcoder
 * skills, plus regex matching of single words and consecutive word
 * pairs against skills EMSI does not know about.
 *
 * @param {Object} data object with a `description` field
 * @returns {Array<Object>} detected skills shaped as `{ id, tag, type, source }`
 */
async function getSkillsByJobDescription (data) {
  // strip markdown formatting so tokenization works on plain text
  const plainText = helper.removeTextFormatting(data.description)
  // ask EMSI to parse skills out of the description
  const emsiTags = await helper.getTags(plainText)
  // tokenize on spaces and drop stop words
  let tokens = _.filter(
    _.split(plainText, ' '),
    token => stopWords.indexOf(token.toLowerCase()) === -1
  )
  // also consider every consecutive two-word combination
  const wordPairs = []
  for (let i = 0; i + 1 < tokens.length; i++) {
    wordPairs.push(`${tokens[i]} ${tokens[i + 1]}`)
  }
  tokens = _.concat(tokens, wordPairs)
  let foundSkills = []
  // take any EMSI tag that maps to a known topcoder skill
  _.each(emsiTags, (t) => {
    if (matchedSkills[t.tag]) {
      foundSkills.push(matchedSkills[t.tag])
    }
  })
  // skills EMSI doesn't cover, matched via pre-compiled regex patterns
  const unMatchedTopcoderSkills = compileRegexPatternForNoEmsiSkills()
  const result = []
  // try every token against every pattern; keep scanning after a match
  // so that look-alike skills are also detected
  _.each(tokens, token => {
    _.each(unMatchedTopcoderSkills, skill => {
      if (skill.pattern.test(token)) {
        foundSkills.push(skill.name)
      }
      // also try the token with a 'js' suffix (e.g. "node" -> "nodejs")
      if (!token.endsWith('js') && skill.name.endsWith('js')) {
        if (skill.pattern.test(token + 'js')) {
          foundSkills.push(skill.name)
        }
      }
    })
  })
  foundSkills = _.uniq(foundSkills)
  const skillIds = await getSkillIdsByNames(foundSkills)
  // shape the output into the tag template used downstream
  _.each(foundSkills, (skillTag, idx) => {
    result.push({
      id: skillIds[idx],
      tag: skillTag,
      type: 'taas_skill',
      source: 'taas-jd-parser'
    })
  })
  return result
}

getSkillsByJobDescription.schema = Joi.object({
  data: Joi.object({
    description: Joi.string().required()
  }).required()
}).required()
/**
 * Validate given skillIds and return their names.
 *
 * @param {Array<string>} skills the array of skill ids
 * @returns {Array<string>} the array of skill names (same order as input)
 * @throws {BadRequestError} listing every id that does not exist
 */
async function getSkillNamesByIds (skills) {
  const responses = await Promise.all(
    _.map(skills, async (skillId) => {
      try {
        const found = await helper.getSkillById(skillId)
        return _.assign(found, { found: true })
      } catch (err) {
        // anything other than a 404 is a real failure - propagate it
        if (err.status !== HttpStatus.NOT_FOUND) {
          throw err
        }
        return { found: false, skill: skillId }
      }
    })
  )
  const notFound = _.filter(responses, res => !res.found)
  if (notFound.length) {
    throw new errors.BadRequestError(`Invalid skills: [${notFound.map(res => res.skill)}]`)
  }
  return _.map(responses, 'name')
}

getSkillNamesByIds.schema = Joi.object({
  skills: Joi.array().items(Joi.string().uuid().required()).required()
}).required()
/**
 * Finds and returns the ids of given skill names.
 *
 * @param {Array<string>} skills the array of skill names
 * @returns {Array<string>} the array of skill ids (undefined for names with
 *   no exact case-insensitive match)
 */
async function getSkillIdsByNames (skills) {
  const tcSkills = await helper.getAllTopcoderSkills({ name: _.join(skills, ',') })
  // the endpoint returns partial matches, so narrow down to exact
  // (case-insensitive) name matches, preserving the input order
  return _.map(skills, (name) => {
    const exact = _.find(tcSkills, tcSkill => _.toLower(name) === _.toLower(tcSkill.name))
    return _.get(exact, 'id')
  })
}

getSkillIdsByNames.schema = Joi.object({
  skills: Joi.array().items(Joi.string().required()).required()
}).required()
/**
 * Creates the role search request.
 *
 * @param {Object} currentUser the user who performs this operation
 * @param {Object} roleSearchRequest the role search request payload
 * @returns {RoleSearchRequest} the created role search request entity (plain object)
 */
async function createRoleSearchRequest (currentUser, roleSearchRequest) {
  roleSearchRequest.createdBy = await helper.getUserId(currentUser.userId)
  // non-machine callers are logged-in members: link the new request
  // to the member and to their most recent previous search
  if (!currentUser.isMachine) {
    roleSearchRequest.memberId = roleSearchRequest.createdBy
    const latest = await RoleSearchRequest.findOne({
      where: { memberId: roleSearchRequest.memberId },
      order: [['createdAt', 'DESC']]
    })
    if (latest) {
      roleSearchRequest.previousRoleSearchRequestId = latest.id
    }
  }
  const created = await RoleSearchRequest.create(roleSearchRequest)
  return created.toJSON()
}

createRoleSearchRequest.schema = Joi.object({
  currentUser: Joi.object().required(),
  roleSearchRequest: Joi.object({
    roleId: Joi.string().uuid(),
    jobDescription: Joi.string().max(100000),
    skills: Joi.array().items(Joi.string().uuid().required())
  }).required().min(1)
}).required()
/**
 * Exclude some fields from role if the user is an external member.
 *
 * @param {Object} currentUser the user who performs this operation (may be nil)
 * @param {Object} role the role object to be cleaned (mutated in place)
 * @returns {Object} the cleaned role
 */
async function _cleanRoleDTO (currentUser, role) {
  // a missing user or a machine token means nobody is logged in,
  // which is treated the same as an external member
  const isExternal =
    _.isNil(currentUser) ||
    currentUser.isMachine ||
    await isExternalMember(currentUser.userId)
  role.isExternalMember = isExternal
  if (isExternal && role.rates) {
    // hide internal-only rate breakdowns from external members
    role.rates = _.map(role.rates, rate =>
      _.omit(rate, ['inCountry', 'offShore', 'niche', 'rate30InCountry', 'rate30OffShore', 'rate30Niche', 'rate20InCountry', 'rate20OffShore', 'rate20Niche']))
  }
  return role
}
/**
 * Finds out if member is an external member.
 *
 * A member is external when they belong to none of the configured
 * internal member groups.
 *
 * @param {number} memberId the external id of member
 * @returns {boolean}
 */
async function isExternalMember (memberId) {
  const memberGroups = await helper.getMemberGroups(memberId)
  return _.isEmpty(_.intersection(config.INTERNAL_MEMBER_GROUPS, memberGroups))
}

isExternalMember.schema = Joi.object({
  memberId: Joi.number().required()
}).required()
/**
 * Create a new TaaS team: a `talent-as-a-service` project plus one Job
 * per requested position, then dispatch a team-created event.
 *
 * @param {Object} currentUser the user performing the operation.
 * @param {Object} data the team data
 * @returns {Object} the created project id
 */
async function createTeam (currentUser, data) {
  // before creating a project, we should validate the given roleSearchRequestIds
  // because if some data is missing it would fail to create jobs.
  const roleSearchRequests = await _validateRoleSearchRequests(_.map(data.positions, 'roleSearchRequestId'))
  const projectRequestBody = {
    name: data.teamName,
    description: data.teamDescription,
    type: 'talent-as-a-service',
    details: {
      positions: data.positions,
      // utm tracking info is optional and stored verbatim on the project
      utm: {
        code: data.refCode,
        intakeSource: data.intakeSource
      }
    }
  }
  // create project with given data
  const project = await helper.createProject(currentUser, projectRequestBody)
  // create jobs for the given positions.
  const jobs = await Promise.all(_.map(data.positions, async position => {
    // skills/description/roleId/resourceType come from the validated
    // roleSearchRequest (filled in from the Role when missing)
    const roleSearchRequest = roleSearchRequests[position.roleSearchRequestId]
    const job = {
      projectId: project.id,
      title: position.roleName,
      numPositions: position.numberOfResources,
      rateType: position.rateType,
      workload: position.workload,
      hoursPerWeek: position.hoursPerWeek,
      skills: roleSearchRequest.skills,
      description: roleSearchRequest.jobDescription,
      roleIds: [roleSearchRequest.roleId],
      resourceType: roleSearchRequest.resourceType
    }
    // start date and duration are optional per position
    if (position.startMonth) {
      job.startDate = position.startMonth
    }
    if (position.durationWeeks) {
      job.duration = position.durationWeeks
    }
    return await JobService.createJob(currentUser, job, true)
  }))
  // notify downstream consumers that the team (project + jobs) was created
  await eventDispatcher.handleEvent(config.TAAS_TEAM_CREATE_TOPIC, { project, jobs })
  return { projectId: project.id }
}
createTeam.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    data: Joi.object().keys({
      teamName: Joi.string().required(),
      teamDescription: Joi.string(),
      refCode: Joi.string(),
      intakeSource: Joi.string(),
      positions: Joi.array().items(
        Joi.object().keys({
          roleName: Joi.string().required(),
          roleSearchRequestId: Joi.string().uuid().required(),
          numberOfResources: Joi.number().integer().min(1).required(),
          durationWeeks: Joi.number().integer().min(1),
          startMonth: Joi.date(),
          rateType: Joi.rateType().default('weekly'),
          workload: Joi.workload().default('full-time'),
          hoursPerWeek: Joi.number().integer().positive(),
          resourceType: Joi.string()
        }).required()
      ).required()
    }).required()
  })
  .required()
/**
 * Validate the given roleSearchRequestIds and collect the data needed
 * to create jobs from them (skills, description, resource type).
 *
 * @param {Array<string>} roleSearchRequestIds the roleSearchRequestIds
 * @returns {Object} map of roleSearchRequestId -> enriched plain request
 * @throws {ConflictError} when a request has no roleId, or its role has no skills
 */
async function _validateRoleSearchRequests (roleSearchRequestIds) {
  const roleSearchRequests = {}
  await Promise.all(_.map(roleSearchRequestIds, async (requestId) => {
    // throws NotFound if the id is unknown
    const request = await RoleSearchRequest.findById(requestId)
    // cache the plain version to avoid repeated DB calls later
    const plain = request.toJSON()
    roleSearchRequests[requestId] = plain
    // a job cannot be created without a role
    if (!request.roleId) {
      throw new errors.ConflictError(`roleSearchRequestId: ${requestId} must have roleId`)
    }
    const role = await Role.findById(request.roleId)
    // when the request has no skills, fall back to the role's skill list
    if (!request.skills) {
      if (!role.listOfSkills) {
        throw new errors.ConflictError(`role: ${role.id} must have skills`)
      }
      plain.skills = await getSkillIdsByNames(role.listOfSkills)
    }
    // same fallback for the job description
    if (!request.jobDescription) {
      plain.jobDescription = role.description
    }
    plain.resourceType = role.name
  }))
  return roleSearchRequests
}
/**
 * Search skills (proxied to the topcoder skills endpoint).
 * @param {Object} criteria the search criteria
 * @returns {Object} the search result, contains total/page/perPage and result array
 */
async function searchSkills (criteria) {
  const skillsPage = await helper.getTopcoderSkills(criteria)
  return skillsPage
}

searchSkills.schema = Joi.object({
  criteria: Joi.object({
    page: Joi.page(),
    perPage: Joi.perPage(),
    orderBy: Joi.string()
  }).required()
}).required()
/**
 * Get member suggestions for a handle fragment.
 *
 * @param {Object} currentUser the user performing the operation
 * @param {String} fragment the user's handle fragment
 * @returns {Array} the suggestion result content
 * @throws {ForbiddenError} when the caller lacks manage permission
 */
async function suggestMembers (currentUser, fragment) {
  if (!currentUser.hasManagePermission) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  const suggestions = await helper.getMembersSuggest(fragment)
  return suggestions.result.content
}

suggestMembers.schema = Joi.object({
  currentUser: Joi.object().required(),
  fragment: Joi.string().required()
}).required()
/**
 * Calculates the total amount across all line items.
 * @param {Object} amount collection of `{ numberOfResources, rate }` items
 * @returns {Object} object with the summed `totalAmount`
 */
async function calculateAmount (amount) {
  const lineTotals = _.map(amount, (line) => line.numberOfResources * line.rate)
  return { totalAmount: _.sum(lineTotals) }
}
/**
 * Creates a stripe payment intent for the given amount.
 * @param {int} totalAmount the amount in dollars
 * @returns {string} paymentIntentToken (the stripe client secret)
 */
async function createPayment (totalAmount) {
  // stripe expects the amount in the smallest currency unit (cents)
  const amountInCents = totalAmount * 100
  const intent = await stripe.paymentIntents.create({
    amount: amountInCents,
    currency: config.CURRENCY
  })
  return { paymentIntentToken: intent.client_secret }
}
// public API of this team service module
module.exports = {
  searchTeams,
  getTeam,
  getTeamJob,
  sendEmail,
  addMembers,
  searchMembers,
  searchInvites,
  deleteMember,
  getMe,
  roleSearchRequest,
  getRoleBySkills,
  getSkillsByJobDescription,
  getSkillNamesByIds,
  getSkillIdsByNames,
  createRoleSearchRequest,
  isExternalMember,
  createTeam,
  calculateAmount,
  createPayment,
  searchSkills,
  suggestMembers
}
<file_sep>/**
 * Restore the changed jobCandidates into their original state.
*/
const fs = require('fs')
const path = require('path')
const { JobCandidate } = require('../../src/models')
const logger = require('../../src/common/logger')
const currentStep = 'Restore'
/**
 * Restore job candidates from the JSON backup files in `./temp/`,
 * setting each candidate's status back to the backed-up value.
 *
 * Fixes vs original: the file list was built with a needlessly `async`
 * forEach callback (misleading, and any rejection inside it would be
 * silently unhandled) - replaced with a plain synchronous `.map`;
 * `var` loop index replaced with `let`; stray `;` removed.
 */
async function restore () {
  logger.info({ component: currentStep, message: '*************************** Restore process started ***************************' })
  const filePath = path.join(__dirname, '/temp/')
  // collect the full paths of all backup files (synchronous - no async needed)
  const files = fs.readdirSync(filePath).map((file) => `${filePath}${file}`)
  let totalSum = 0
  for (let j = 0; j < files.length; j++) {
    const data = fs.readFileSync(files[j], 'utf-8')
    const jobCandidates = JSON.parse(data)
    let summary = 0
    for (let i = 0; i < jobCandidates.length; i++) {
      const jc = await JobCandidate.findById(jobCandidates[i].id)
      // skip candidates that no longer exist
      if (jc) {
        const oldStatus = jc.status
        const updated = await jc.update({ status: jobCandidates[i].status })
        summary++
        totalSum++
        logger.info({ component: currentStep, message: `jobCandidate with ${jc.id} status restored from ${oldStatus} to ${updated.status}` })
      }
    }
    logger.info({ component: `${currentStep} Sub`, message: `Restored ${summary} jobCandidates from ${files[j]}` })
  }
  logger.info({ component: currentStep, message: `Report: Totally restored ${totalSum} jobCandidates` })
  logger.info({ component: currentStep, message: '*************************** Restore process finished ***************************' })
}
// run the restore and exit with a non-zero status code on failure
restore().then(() => {
  logger.info({ component: currentStep, message: 'Execution Finished!' })
  process.exit()
}).catch(err => {
  logger.error(err.message)
  process.exit(1)
})
<file_sep>'use strict';
const config = require('config')
/**
* Migrate ResourceBooking status - from assigned to placed.
*/
module.exports = {
up: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.resource_bookings`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'placed' WHERE status = 'assigned'`)
},
down: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.resource_bookings`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'assigned' WHERE status = 'placed'`)
}
};
<file_sep>
const { Sequelize, Model } = require('sequelize')
const config = require('config')
const errors = require('../common/errors')
// Sequelize model factory for the `jobs` table.
module.exports = (sequelize) => {
  class Job extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      // keep a reference to all models so findById can include candidates
      Job._models = models
      Job.hasMany(models.JobCandidate, { as: 'candidates', foreignKey: 'jobId' })
      Job.hasMany(models.ResourceBooking, { foreignKey: 'jobId' })
    }
    /**
     * Get job by id
     * @param {String} id the job id
     * @param {Boolean} withCandidates whether contains candidates
     * @returns {Job} the Job instance
     * @throws {NotFoundError} if no job exists with the given id
     */
    static async findById (id, withCandidates = false) {
      const criteria = {
        where: {
          id
        }
      }
      if (withCandidates) {
        // left join: a job without candidates is still returned
        criteria.include = [{
          model: Job._models.JobCandidate,
          as: 'candidates',
          required: false
        }]
      }
      const job = await Job.findOne(criteria)
      if (!job) {
        throw new errors.NotFoundError(`id: ${id} "Job" doesn't exists.`)
      }
      return job
    }
  }
  // column definitions; `field` maps camelCase attributes to snake_case columns
  Job.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      projectId: {
        field: 'project_id',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      externalId: {
        field: 'external_id',
        type: Sequelize.STRING(255)
      },
      description: {
        type: Sequelize.TEXT // technically unlimited length
      },
      title: {
        type: Sequelize.STRING(128),
        allowNull: false
      },
      startDate: {
        field: 'start_date',
        type: Sequelize.DATE
      },
      duration: {
        field: 'duration',
        type: Sequelize.INTEGER
      },
      numPositions: {
        field: 'num_positions',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      resourceType: {
        field: 'resource_type',
        type: Sequelize.STRING(255)
      },
      rateType: {
        field: 'rate_type',
        type: Sequelize.STRING(255)
      },
      workload: {
        field: 'workload',
        type: Sequelize.STRING(45)
      },
      skills: {
        type: Sequelize.JSONB,
        allowNull: false
      },
      status: {
        type: Sequelize.STRING(255),
        allowNull: false
      },
      isApplicationPageActive: {
        field: 'is_application_page_active',
        type: Sequelize.BOOLEAN,
        defaultValue: false,
        allowNull: false
      },
      minSalary: {
        field: 'min_salary',
        type: Sequelize.INTEGER,
        allowNull: true
      },
      maxSalary: {
        field: 'max_salary',
        type: Sequelize.INTEGER,
        allowNull: true
      },
      hoursPerWeek: {
        field: 'hours_per_week',
        type: Sequelize.INTEGER,
        allowNull: true
      },
      jobLocation: {
        field: 'job_location',
        type: Sequelize.STRING(255),
        allowNull: true
      },
      jobTimezone: {
        field: 'job_timezone',
        type: Sequelize.STRING(128),
        allowNull: true
      },
      currency: {
        field: 'currency',
        type: Sequelize.STRING(30),
        allowNull: true
      },
      roleIds: {
        field: 'role_ids',
        type: Sequelize.ARRAY({
          type: Sequelize.UUID
        })
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'jobs',
      // soft deletes: rows get deletedAt set instead of being removed
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      // hide deletedAt from default query results
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // also hide deletedAt on freshly created instances
        afterCreate: (job) => {
          delete job.dataValues.deletedAt
        }
      }
    }
  )
  return Job
}
<file_sep>/**
* This service provides operations of Interview.
*/
const _ = require('lodash')
const Joi = require('joi')
const moment = require('moment')
const config = require('config')
const { Op, ForeignKeyConstraintError } = require('sequelize')
const { v4: uuid, validate: uuidValidate } = require('uuid')
const { Interviews: InterviewConstants } = require('../../app-constants')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const {
processRequestInterview,
processUpdateInterview,
processBulkUpdateInterviews
} = require('../esProcessors/InterviewProcessor')
const {
processUpdate: jobCandidateProcessUpdate
} = require('../esProcessors/JobCandidateProcessor')
const sequelize = models.sequelize
const Interview = models.Interview
const esClient = helper.getESClient()
/**
 * Ensures user is permitted for the operation.
 *
 * Callers with manage permission or machine (M2M) tokens are always
 * allowed; everyone else must be a member of the job's project.
 *
 * @param {Object} currentUser the user who performs this operation
 * @param {String} jobCandidateId the job candidate id
 * @throws {errors.ForbiddenError}
 */
async function ensureUserIsPermitted (currentUser, jobCandidateId) {
  if (currentUser.hasManagePermission || currentUser.isMachine) {
    return
  }
  const jobCandidate = await models.JobCandidate.findById(jobCandidateId)
  const job = await jobCandidate.getJob()
  await helper.checkIsMemberOfProject(currentUser.userId, job.projectId)
}
/**
 * Handles common sequelize errors by translating them to API errors.
 * Unrecognized errors are left for the caller to rethrow.
 *
 * @param {Object} err the error raised by sequelize
 * @param {String} jobCandidateId the job candidate id
 */
function handleSequelizeError (err, jobCandidateId) {
  // a foreign key violation here means the jobCandidateId doesn't exist
  if (err instanceof ForeignKeyConstraintError) {
    throw new errors.NotFoundError(
      `The job candidate with id=${jobCandidateId} doesn't exist.`
    )
  }
  // other sequelize errors carry details on `original.detail`
  const detail = _.get(err, 'original.detail')
  if (detail) {
    throw new errors.BadRequestError(detail)
  }
}
/**
 * Get interview by round.
 *
 * Reads from Elasticsearch by default and falls back to the database
 * when `fromDb` is set.
 *
 * @param {Object} currentUser the user who performs this operation
 * @param {String} jobCandidateId the job candidate id
 * @param {Number} round the interview round
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the interview
 */
async function getInterviewByRound (currentUser, jobCandidateId, round, fromDb = false) {
  // check permission
  await ensureUserIsPermitted(currentUser, jobCandidateId)
  if (!fromDb) {
    try {
      // interviews are stored nested inside the job candidate ES document
      const jobCandidateES = await esClient.get({
        index: config.esConfig.ES_INDEX_JOB_CANDIDATE,
        id: jobCandidateId
      })
      const interviews = _.get(jobCandidateES, 'body._source.interviews', [])
      const match = _.find(interviews, (item) => item.round === round)
      if (match) {
        return match
      }
      throw new errors.NotFoundError(`Interview doesn't exist with round: ${round}`)
    } catch (err) {
      if (helper.isDocumentMissingException(err)) {
        throw new errors.NotFoundError(`id: ${jobCandidateId} "JobCandidate" not found`)
      }
      logger.logFullError(err, { component: 'InterviewService', context: 'getInterviewByRound' })
      throw err
    }
  }
  // either the ES query failed or `fromDb` was requested - use the DB
  logger.info({ component: 'InterviewService', context: 'getInterviewByRound', message: 'try to query db for data' })
  const interview = await Interview.findOne({
    where: { jobCandidateId, round }
  })
  if (!interview) {
    throw new errors.NotFoundError(`Interview doesn't exist with jobCandidateId: ${jobCandidateId} and round: ${round}`)
  }
  return interview.dataValues
}
getInterviewByRound.schema = Joi.object({
  currentUser: Joi.object().required(),
  jobCandidateId: Joi.string().uuid().required(),
  round: Joi.number().integer().positive().required(),
  fromDb: Joi.boolean()
}).required()
/**
 * Get interview by id
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} id the interview or xai id
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the interview
 */
async function getInterviewById (currentUser, id, fromDb = false) {
  if (!fromDb) {
    try {
      // construct query for nested search
      // (interviews are nested docs inside the job candidate index)
      const esQueryBody = {
        _source: false,
        query: {
          nested: {
            path: 'interviews',
            query: {
              bool: {
                should: []
              }
            },
            inner_hits: {}
          }
        }
      }
      // add filtering terms - `should` means: match either field
      // interviewId
      esQueryBody.query.nested.query.bool.should.push({
        term: {
          'interviews.id': {
            value: id
          }
        }
      })
      // xaiId
      esQueryBody.query.nested.query.bool.should.push({
        term: {
          'interviews.xaiId': {
            value: id
          }
        }
      })
      // search
      const { body } = await esClient.search({
        index: config.esConfig.ES_INDEX_JOB_CANDIDATE,
        body: esQueryBody
      })
      // extract inner interview hit from body - there's always one jobCandidate & interview hit as we search with IDs
      const interview = _.get(body, 'hits.hits[0].inner_hits.interviews.hits.hits[0]._source')
      if (interview) {
        // check permission before returning
        await ensureUserIsPermitted(currentUser, interview.jobCandidateId)
        return interview
      }
      // if reaches here, the interview with this IDs is not found
      throw new errors.NotFoundError(`Interview doesn't exist with id/xaiId: ${id}`)
    } catch (err) {
      logger.logFullError(err, { component: 'InterviewService', context: 'getInterviewById' })
      throw err
    }
  }
  // either ES query failed or `fromDb` is set - fallback to DB
  logger.info({ component: 'InterviewService', context: 'getInterviewById', message: 'try to query db for data' })
  var interview
  // a UUID is an internal interview id; anything else is treated as a xaiId
  if (uuidValidate(id)) {
    interview = await Interview.findOne({
      where: {
        [Op.or]: [
          { id }
        ]
      }
    })
  } else {
    interview = await Interview.findOne({
      where: {
        [Op.or]: [
          { xaiId: id }
        ]
      }
    })
  }
  // throw NotFound error if doesn't exist
  if (!!interview !== true) {
    throw new errors.NotFoundError(`Interview doesn't exist with id/xaiId: ${id}`)
  }
  // check permission before returning
  await ensureUserIsPermitted(currentUser, interview.jobCandidateId)
  return interview.dataValues
}
getInterviewById.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().required(),
  fromDb: Joi.boolean()
}).required()
/**
 * Request interview
 *
 * Creates the interview inside a DB transaction (also updating the
 * job candidate's status to "interview" and the ES index), then posts
 * the corresponding Kafka events.
 *
 * @param {Object} currentUser the user who perform this operation
 * @param {String} jobCandidateId the job candidate id
 * @param {Object} interview the interview to be created
 * @returns {Object} the created/requested interview
 * @throws {ConflictError} when the candidate already has the maximum number of interviews
 */
async function requestInterview (currentUser, jobCandidateId, interview) {
  // check permission
  await ensureUserIsPermitted(currentUser, jobCandidateId)
  // find the round count - existing interviews determine the next round number
  const round = await Interview.count({
    where: { jobCandidateId }
  })
  // throw error if candidate has already had MaxAllowedCount interviews
  if (round >= InterviewConstants.MaxAllowedCount) {
    throw new errors.ConflictError(`You've reached the maximum allowed number (${InterviewConstants.MaxAllowedCount}) of interviews for this candidate.`)
  }
  // get job candidate user details
  const jobCandidate = await models.JobCandidate.findById(jobCandidateId)
  const jobCandidateUser = await helper.getUserById(jobCandidate.userId)
  const jobCandidateMember = await helper.getUserByHandle(jobCandidateUser.handle)
  // pre-populate fields
  interview.id = uuid()
  interview.jobCandidateId = jobCandidateId
  interview.round = round + 1
  // duration is derived from the chosen XAI template
  interview.duration = InterviewConstants.XaiTemplate[interview.templateUrl]
  interview.createdBy = await helper.getUserId(currentUser.userId)
  // the candidate is always the first guest
  interview.guestEmails = [jobCandidateMember.email, ...interview.guestEmails]
  // pre-populate hostName & guestNames
  const hostMembers = await helper.getMemberDetailsByEmails([interview.hostEmail])
  const guestMembers = await helper.getMemberDetailsByEmails(interview.guestEmails)
  interview.hostName = `${hostMembers[0].firstName} ${hostMembers[0].lastName}`
  interview.guestNames = _.map(interview.guestEmails, (guestEmail) => {
    // fall back to the email's local part when no member profile is found
    var foundGuestMember = _.find(guestMembers, function (guestMember) { return guestEmail === guestMember.email })
    return (foundGuestMember !== undefined) ? `${foundGuestMember.firstName} ${foundGuestMember.lastName}` : guestEmail.split('@')[0]
  })
  let entity
  let jobCandidateEntity
  try {
    await sequelize.transaction(async (t) => {
      // create the interview
      const created = await Interview.create(interview, { transaction: t })
      entity = created.toJSON()
      // mirror the new interview into Elasticsearch
      await processRequestInterview(entity)
      // update jobCandidate.status to Interview
      const [, affectedRows] = await models.JobCandidate.update(
        { status: 'interview' },
        { where: { id: created.jobCandidateId }, returning: true, transaction: t }
      )
      jobCandidateEntity = _.omit(_.get(affectedRows, '0.dataValues'), 'deletedAt')
      await jobCandidateProcessUpdate(jobCandidateEntity)
    })
  } catch (err) {
    // report the failure to the error topic so it can be retried/inspected
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'interview.request')
    }
    // gracefully handle if one of the common sequelize errors
    handleSequelizeError(err, jobCandidateId)
    // if reaches here, it's not one of the common errors handled in `handleSequelizeError`
    throw err
  }
  // events are posted only after the transaction committed successfully
  await helper.postEvent(config.TAAS_INTERVIEW_REQUEST_TOPIC, entity)
  await helper.postEvent(config.TAAS_JOB_CANDIDATE_UPDATE_TOPIC, jobCandidateEntity)
  // return created interview
  return entity
}
requestInterview.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  jobCandidateId: Joi.string().uuid().required(),
  interview: Joi.object().keys({
    calendarEventId: Joi.string().allow(null),
    templateUrl: Joi.xaiTemplate().required(),
    hostEmail: Joi.string().email().required(),
    guestEmails: Joi.array().items(Joi.string().email()).default([]),
    status: Joi.interviewStatus().default(InterviewConstants.Status.Scheduling)
  }).required()
}).required()
/**
 * Updates interview
 *
 * Applies the changes inside a DB transaction (also updating the ES
 * index) and posts the update event afterwards.
 *
 * @param {Object} currentUser user who performs the operation
 * @param {Object} interview the existing interview object (sequelize instance)
 * @param {Object} data object containing updated fields
 * @returns {Object} updated interview
 * @throws {BadRequestError} when changing anything but `status` on a Completed interview
 */
async function partiallyUpdateInterview (currentUser, interview, data) {
  // only status can be updated for Completed interviews
  if (interview.status === InterviewConstants.Status.Completed) {
    const updatedFields = _.keys(data)
    if (updatedFields.length !== 1 || !_.includes(updatedFields, 'status')) {
      throw new errors.BadRequestError('Only the "status" can be updated for Completed interviews.')
    }
  }
  // automatically set endTimestamp if startTimestamp is provided
  // (end = start + the interview's template-defined duration in minutes)
  if (data.startTimestamp && !!data.endTimestamp !== true) {
    data.endTimestamp = moment(data.startTimestamp).add(interview.duration, 'minutes').toDate()
  }
  data.updatedBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    await sequelize.transaction(async (t) => {
      const updated = await interview.update(data, { transaction: t })
      entity = updated.toJSON()
      // mirror the change into Elasticsearch within the same transaction
      await processUpdateInterview(entity)
    })
  } catch (err) {
    // report the failure to the error topic so it can be retried/inspected
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'interview.update')
    }
    // gracefully handle if one of the common sequelize errors
    handleSequelizeError(err, interview.jobCandidateId)
    // if reaches here, it's not one of the common errors handled in `handleSequelizeError`
    throw err
  }
  // the event carries the previous state for consumers that diff old vs new
  await helper.postEvent(config.TAAS_INTERVIEW_UPDATE_TOPIC, entity, { oldValue: interview.toJSON() })
  return entity
}
/**
 * Patch (partially update) interview by round
 * @param {Object} currentUser the user who perform this operation
 * @param {String} jobCandidateId the job candidate id
 * @param {Number} round the interview round
 * @param {Object} data object containing patched fields
 * @returns {Object} the patched interview object
 * @throws {NotFoundError} when no interview matches the jobCandidateId/round pair
 */
async function partiallyUpdateInterviewByRound (currentUser, jobCandidateId, round, data) {
  const interview = await Interview.findOne({
    where: {
      jobCandidateId, round
    }
  })
  // throw NotFound error if doesn't exist
  // (simplified from the original `!!interview !== true`)
  if (!interview) {
    throw new errors.NotFoundError(`Interview doesn't exist with jobCandidateId: ${jobCandidateId} and round: ${round}`)
  }
  // check permission
  await ensureUserIsPermitted(currentUser, jobCandidateId)
  return await partiallyUpdateInterview(currentUser, interview, data)
}
// validation schema for `partiallyUpdateInterviewByRound`
partiallyUpdateInterviewByRound.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  jobCandidateId: Joi.string().uuid().required(),
  round: Joi.number().integer().positive().required(),
  data: Joi.object().keys({
    xaiId: Joi.string().allow(null),
    // calendarEventId becomes mandatory once the interview is (re)scheduled
    calendarEventId: Joi.string().when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    templateUrl: Joi.xaiTemplate(),
    templateId: Joi.string().allow(null),
    templateType: Joi.string().allow(null),
    title: Joi.string().allow(null),
    locationDetails: Joi.string().allow(null),
    // timestamps are only required when the interview is (re)scheduled;
    // startTimestamp must be in the future, endTimestamp after startTimestamp
    startTimestamp: Joi.date().greater('now').when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    endTimestamp: Joi.date().greater(Joi.ref('startTimestamp')).when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    hostName: Joi.string(),
    hostEmail: Joi.string().email(),
    guestNames: Joi.array().items(Joi.string()).allow(null),
    guestEmails: Joi.array().items(Joi.string().email()).allow(null),
    status: Joi.interviewStatus(),
    rescheduleUrl: Joi.string().allow(null),
    deletedAt: Joi.date().allow(null)
  }).required().min(1) // at least one key - i.e. don't allow empty object
}).required()
/**
 * Patch (partially update) interview by id
 * @param {Object} currentUser the user who perform this operation
 * @param {String} id the interview or x.ai meeting id
 * @param {Object} data object containing patched fields
 * @returns {Object} the patched interview object
 * @throws {NotFoundError} when no interview matches the given id/xaiId
 */
async function partiallyUpdateInterviewById (currentUser, id, data) {
  // a valid UUID is treated as the interview primary key,
  // anything else is treated as the x.ai meeting id.
  // (the original duplicated the query in two branches and wrapped a single
  // condition in a needless `[Op.or]` - collapsed to one lookup)
  const where = uuidValidate(id) ? { id } : { xaiId: id }
  const interview = await Interview.findOne({ where })
  // throw NotFound error if doesn't exist
  if (!interview) {
    throw new errors.NotFoundError(`Interview doesn't exist with id/xaiId: ${id}`)
  }
  // check permission
  await ensureUserIsPermitted(currentUser, interview.jobCandidateId)
  return await partiallyUpdateInterview(currentUser, interview, data)
}
// validation schema for `partiallyUpdateInterviewById`
// note: unlike the by-round variant, `xaiId` is required here
partiallyUpdateInterviewById.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().required(),
  data: Joi.object().keys({
    xaiId: Joi.string().required(),
    // calendarEventId becomes mandatory once the interview is (re)scheduled
    calendarEventId: Joi.string().when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    templateUrl: Joi.xaiTemplate(),
    templateId: Joi.string().allow(null),
    templateType: Joi.string().allow(null),
    title: Joi.string().allow(null),
    locationDetails: Joi.string().allow(null),
    // timestamps are only required when the interview is (re)scheduled
    startTimestamp: Joi.date().greater('now').when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    endTimestamp: Joi.date().greater(Joi.ref('startTimestamp')).when('status', {
      is: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
      then: Joi.required(),
      otherwise: Joi.allow(null)
    }),
    hostName: Joi.string(),
    hostEmail: Joi.string().email(),
    guestNames: Joi.array().items(Joi.string()).allow(null),
    guestEmails: Joi.array().items(Joi.string().email()).allow(null),
    status: Joi.interviewStatus(),
    rescheduleUrl: Joi.string().allow(null),
    deletedAt: Joi.date().allow(null)
  }).required().min(1) // at least one key - i.e. don't allow empty object
}).required()
/**
 * List interviews
 *
 * Searches Elasticsearch first (interviews are nested under jobCandidate docs);
 * on any ES error it falls back to querying the database.
 *
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} jobCandidateId the job candidate id
 * @param {Object} criteria the search criteria
 * @returns {Object} the search result, contain total/page/perPage and result array
 */
async function searchInterviews (currentUser, jobCandidateId, criteria) {
  // check permission
  await ensureUserIsPermitted(currentUser, jobCandidateId)
  const { page, perPage } = criteria
  try {
    // construct query for nested search
    const esQueryBody = {
      _source: false,
      query: {
        nested: {
          path: 'interviews',
          query: {
            bool: {
              must: []
            }
          },
          inner_hits: {
            size: 100 // max. inner_hits size
          }
        }
      }
    }
    // add filtering terms
    // jobCandidateId
    esQueryBody.query.nested.query.bool.must.push({
      term: {
        'interviews.jobCandidateId': {
          value: jobCandidateId
        }
      }
    })
    // criteria
    _.each(_.pick(criteria, ['status', 'createdAt', 'updatedAt']), (value, key) => {
      const innerKey = `interviews.${key}`
      esQueryBody.query.nested.query.bool.must.push({
        term: {
          [innerKey]: {
            value
          }
        }
      })
    })
    // search
    const { body } = await esClient.search({
      index: config.esConfig.ES_INDEX_JOB_CANDIDATE,
      body: esQueryBody
    })
    // get jobCandidate hit from body - there's always one jobCandidate hit as we search via jobCandidateId
    // extract inner interview hits from jobCandidate
    const interviewHits = _.get(body, 'hits.hits[0].inner_hits.interviews.hits.hits', [])
    const interviews = _.map(interviewHits, '_source')
    // we need to sort & paginate interviews manually
    // as it's not possible with ES query on nested type
    // (ES applies pagination & sorting on parent documents, not on the nested objects)
    // sort
    const sortedInterviews = _.orderBy(interviews, criteria.sortBy, criteria.sortOrder)
    // paginate
    const start = (page - 1) * perPage
    const end = start + perPage
    const paginatedInterviews = _.slice(sortedInterviews, start, end)
    return {
      total: sortedInterviews.length,
      page,
      perPage,
      result: paginatedInterviews
    }
  } catch (err) {
    // log and fall through to the DB fallback below - ES errors are non-fatal here
    logger.logFullError(err, { component: 'InterviewService', context: 'searchInterviews' })
  }
  logger.info({ component: 'InterviewService', context: 'searchInterviews', message: 'fallback to DB query' })
  const filter = {
    [Op.and]: [{ jobCandidateId }]
  }
  // apply filtering based on criteria
  _.each(_.pick(criteria, ['status', 'createdAt', 'updatedAt']), (value, key) => {
    filter[Op.and].push({ [key]: value })
  })
  // DB path can sort & paginate natively, unlike the nested ES query above
  const interviews = await Interview.findAll({
    where: filter,
    offset: ((page - 1) * perPage),
    limit: perPage,
    order: [[criteria.sortBy, criteria.sortOrder]]
  })
  const total = await Interview.count({ where: filter })
  return {
    fromDb: true,
    total,
    page,
    perPage,
    result: _.map(interviews, interview => interview.dataValues)
  }
}
// validation schema for `searchInterviews`
searchInterviews.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  jobCandidateId: Joi.string().uuid().required(),
  criteria: Joi.object().keys({
    page: Joi.page(),
    perPage: Joi.perPage(),
    sortBy: Joi.string().valid('round', 'createdAt', 'updatedAt').default('createdAt'),
    sortOrder: Joi.string().valid('desc', 'asc').default('desc'),
    createdAt: Joi.date(),
    updatedAt: Joi.date(),
    status: Joi.interviewStatus()
  }).required()
}).required()
/**
 * Updates the status of completed (based on startTimestamp) interviews.
 * If the startTimestamp of an interview is less than (or equal) the (currentDateTime - 1 hour)
 * it's considered as completed.
 *
 * Runs as a scheduled job; updates DB + ES in one transaction and then posts
 * a single bulk-update Kafka event covering all affected interviews.
 */
async function updateCompletedInterviews () {
  logger.info({ component: 'InterviewService', context: 'updateCompletedInterviews', message: 'Running the scheduled job...' })
  const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000)
  let entity
  let affectedCount
  try {
    await sequelize.transaction(async (t) => {
      const updated = await Interview.update(
        // '00000000-0000-0000-0000-000000000000' - to indicate it's updated by the system job
        { status: InterviewConstants.Status.Completed, updatedBy: '00000000-0000-0000-0000-000000000000' },
        {
          where: {
            status: [InterviewConstants.Status.Scheduled, InterviewConstants.Status.Rescheduled],
            startTimestamp: {
              [Op.lte]: oneHourAgo
            }
          },
          returning: true,
          transaction: t
        }
      )
      // `updated` is [affectedCount, affectedRows]; assign explicitly instead of
      // the ASI-fragile `let x` followed by a line starting with `[...] = ...`
      affectedCount = updated[0]
      const updatedRows = updated[1]
      // post event if there are affected/updated interviews
      if (affectedCount > 0) {
        // payload format:
        // {
        //   jobCandidateId: { interviewId: { affectedFields }, interviewId2: { affectedFields }, ... },
        //   jobCandidateId2: { interviewId: { affectedFields }, interviewId2: { affectedFields }, ... },
        //   ...
        // }
        const bulkUpdatePayload = {}
        // construct payload
        _.forEach(updatedRows, row => {
          const interview = row.toJSON()
          const affectedFields = _.pick(interview, ['status', 'updatedBy', 'updatedAt'])
          _.set(bulkUpdatePayload, [interview.jobCandidateId, interview.id], affectedFields)
        })
        entity = bulkUpdatePayload
        await processBulkUpdateInterviews(bulkUpdatePayload)
      }
    })
  } catch (e) {
    if (entity) {
      // ES processing failed after the DB update - report for reconciliation
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'interview.bulkupdate')
    }
    throw e
  }
  if (affectedCount) {
    // post event
    await helper.postEvent(config.TAAS_INTERVIEW_BULK_UPDATE_TOPIC, entity)
  }
  logger.info({ component: 'InterviewService', context: 'updateCompletedInterviews', message: `Completed running. Updated ${affectedCount} interviews.` })
}
// public API of the Interview service
module.exports = {
  getInterviewByRound,
  getInterviewById,
  requestInterview,
  partiallyUpdateInterviewByRound,
  partiallyUpdateInterviewById,
  searchInterviews,
  updateCompletedInterviews
}
<file_sep>const { Sequelize, Model } = require('sequelize')
const config = require('config')
const errors = require('../common/errors')
// Sequelize model factory for WorkPeriod (one row per booking per week-like period)
module.exports = (sequelize) => {
  class WorkPeriod extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      // keep a reference to all models so findById can include payments lazily
      WorkPeriod._models = models
      WorkPeriod.belongsTo(models.ResourceBooking, { foreignKey: 'resourceBookingId' })
      WorkPeriod.hasMany(models.WorkPeriodPayment, { as: 'payments', foreignKey: 'workPeriodId' })
    }

    /**
     * Get work period by id
     * @param {String} id the work period id
     * @param {Object} options { withPayments: true/false } whether contains payments,
     *   { exclude: [...] } attributes to omit from the result
     * @returns {WorkPeriod} the work period instance
     * @throws {NotFoundError} when no work period matches the id
     */
    static async findById (id, options = { withPayments: false, exclude: [] }) {
      const criteria = {
        where: {
          id
        }
      }
      if (options.exclude && options.exclude.length > 0) {
        criteria.attributes = { exclude: options.exclude }
      }
      if (options.withPayments) {
        criteria.include = [{
          model: WorkPeriod._models.WorkPeriodPayment,
          as: 'payments',
          required: false
        }]
      }
      const workPeriod = await WorkPeriod.findOne(criteria)
      if (!workPeriod) {
        throw new errors.NotFoundError(`id: ${id} "WorkPeriod" doesn't exists.`)
      }
      return workPeriod
    }
  }
  WorkPeriod.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      resourceBookingId: {
        field: 'resource_booking_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      sentSurvey: {
        field: 'sent_survey',
        type: Sequelize.BOOLEAN,
        defaultValue: false,
        allowNull: false
      },
      sentSurveyError: {
        field: 'sent_survey_error',
        allowNull: true,
        // NOTE(review): the nested field spec passed to JSONB appears descriptive
        // only (errorCode/errorMessage shape) - verify Sequelize ignores it
        type: Sequelize.JSONB({
          errorCode: {
            field: 'error_code',
            type: Sequelize.INTEGER
          },
          errorMessage: {
            field: 'error_message',
            type: Sequelize.STRING(255)
          }
        })
      },
      userHandle: {
        field: 'user_handle',
        type: Sequelize.STRING(50),
        allowNull: false
      },
      projectId: {
        field: 'project_id',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      startDate: {
        field: 'start_date',
        type: Sequelize.DATEONLY,
        allowNull: false
      },
      endDate: {
        field: 'end_date',
        type: Sequelize.DATEONLY,
        allowNull: false
      },
      daysWorked: {
        field: 'days_worked',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      daysPaid: {
        field: 'days_paid',
        type: Sequelize.INTEGER,
        allowNull: false
      },
      paymentTotal: {
        field: 'payment_total',
        type: Sequelize.FLOAT,
        allowNull: false
      },
      paymentStatus: {
        field: 'payment_status',
        type: Sequelize.STRING(50),
        allowNull: false
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'work_periods',
      // paranoid: rows are soft-deleted via deleted_at, never physically removed
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // strip the soft-delete marker from freshly created instances
        afterCreate: (workPeriod) => {
          delete workPeriod.dataValues.deletedAt
        }
      },
      indexes: [
        // at most one live (non-deleted) period per booking and date range
        {
          unique: true,
          fields: ['resource_booking_id', 'start_date', 'end_date'],
          where: {
            deleted_at: null
          }
        }
      ]
    }
  )
  return WorkPeriod
}
<file_sep>/**
* Sync the database models to db tables.
*/
const config = require('config')
const fs = require('fs')
const models = require('./models')
const logger = require('./common/logger')
// the directory at which migration scripts are located
const MigrationsDirPath = './migrations'
/**
 * List the filenames of the migration files.
 *
 * @returns {Array} the list of filenames found in the migrations directory
 */
function listMigrationFiles () {
  return fs.readdirSync(MigrationsDirPath)
}
// (Re)create the DB schema and sync all model tables.
// WARNING: passing `force` as the first CLI arg drops the whole schema first.
const initDB = async () => {
  if (process.argv[2] === 'force') {
    await models.sequelize.dropSchema(config.DB_SCHEMA_NAME)
  }
  await models.sequelize.createSchema(config.DB_SCHEMA_NAME)
  // define SequelizeMeta table
  const SequelizeMeta = await models.sequelize.define('SequelizeMeta', {
    name: {
      type: models.Sequelize.STRING(255),
      allowNull: false
    }
  }, { timestamps: false })
  // re-init all tables including the SequelizeMeta table
  await models.sequelize.sync({ force: true })
  // add filenames of existing migration scripts to the SequelizeMeta table
  // so sequelize-cli considers them already applied
  await SequelizeMeta.bulkCreate(listMigrationFiles().map(filename => ({ name: filename })))
}
// run directly from the command line (not when required as a module)
if (!module.parent) {
  initDB().then(() => {
    logger.info({ component: 'init-db', message: 'Database synced successfully' })
    process.exit()
  }).catch((e) => {
    logger.logFullError(e, { component: 'init-db' })
    process.exit(1)
  })
}
module.exports = {
  initDB
}
<file_sep>const { Sequelize, Model } = require('sequelize')
const _ = require('lodash')
const config = require('config')
const errors = require('../common/errors')
const { WorkPeriodPaymentStatus } = require('../../app-constants')
// Sequelize model factory for WorkPeriodPayment (one payment row per work period challenge)
module.exports = (sequelize) => {
  class WorkPeriodPayment extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      WorkPeriodPayment.belongsTo(models.WorkPeriod, { foreignKey: 'workPeriodId' })
    }

    /**
     * Get work period payment by id
     * @param {String} id the work period payment id
     * @returns {WorkPeriodPayment} the work period payment instance
     * @throws {NotFoundError} when no payment matches the id
     */
    static async findById (id) {
      const workPeriodPayment = await WorkPeriodPayment.findOne({
        where: {
          id
        }
      })
      if (!workPeriodPayment) {
        throw new errors.NotFoundError(`id: ${id} "WorkPeriodPayment" doesn't exists`)
      }
      return workPeriodPayment
    }
  }
  WorkPeriodPayment.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      workPeriodId: {
        field: 'work_period_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      challengeId: {
        field: 'challenge_id',
        type: Sequelize.UUID
      },
      memberRate: {
        field: 'member_rate',
        type: Sequelize.FLOAT,
        allowNull: false
      },
      customerRate: {
        field: 'customer_rate',
        type: Sequelize.FLOAT
      },
      days: {
        type: Sequelize.INTEGER,
        allowNull: false
      },
      amount: {
        type: Sequelize.DOUBLE,
        allowNull: false
      },
      // status is constrained to the WorkPeriodPaymentStatus enum values
      status: {
        type: Sequelize.ENUM(_.values(WorkPeriodPaymentStatus)),
        allowNull: false
      },
      statusDetails: {
        field: 'status_details',
        type: Sequelize.JSONB
      },
      billingAccountId: {
        field: 'billing_account_id',
        type: Sequelize.BIGINT,
        allowNull: false
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'work_period_payments',
      // paranoid: rows are soft-deleted via deleted_at
      paranoid: true,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true,
      defaultScope: {
        attributes: {
          exclude: ['deletedAt']
        }
      },
      hooks: {
        // strip the soft-delete marker from freshly created instances
        afterCreate: (workPeriodPayment) => {
          delete workPeriodPayment.dataValues.deletedAt
        }
      }
    }
  )
  return WorkPeriodPayment
}
<file_sep>const config = require('config')
/*
* Add roleIds field to the Job model.
*/
// migration: add the nullable role_ids (UUID array) column to jobs
module.exports = {
  up: async (queryInterface, Sequelize) => {
    await queryInterface.addColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'role_ids',
      {
        type: Sequelize.ARRAY({
          type: Sequelize.UUID
        })
      })
  },
  down: async (queryInterface, Sequelize) => {
    await queryInterface.removeColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'role_ids')
  }
}
<file_sep>/*
* The Report class.
*/
const logger = require('./logger')
const constants = require('./constants')
const _ = require('lodash')
// Collects per-line processing messages and renders them to the logger.
class Report {
  constructor () {
    this.messages = []
  }

  // append a message to the report
  add (message) {
    this.messages.push(message)
  }

  // print the most recently added message to the console,
  // choosing the log level from its processing status
  print () {
    const latest = _.last(this.messages)
    const line = `#${latest.lnum} - ${_.map(latest.info, 'text').join('; ')}`
    if (latest.status === constants.ProcessingStatus.Skipped) {
      logger.warn(line)
    } else if (latest.status === constants.ProcessingStatus.Successful) {
      logger.info(line)
    } else if (latest.status === constants.ProcessingStatus.Failed) {
      logger.error(line)
    }
  }

  // print a summary of all collected messages to the console
  printSummary () {
    // counts by overall processing status
    const byStatus = _.groupBy(this.messages, 'status')
    const statusCount = (status) => (byStatus[status] || []).length
    // counts by info tag across all messages
    const byTag = _.groupBy(_.flatten(_.map(this.messages, message => message.info)), 'tag')
    const tagCount = (tag) => (byTag[tag] || []).length
    logger.info('=== summary ===')
    logger.info(`total: ${this.messages.length}`)
    logger.info(`success: ${statusCount(constants.ProcessingStatus.Successful)}`)
    logger.info(`failure: ${statusCount(constants.ProcessingStatus.Failed)}`)
    logger.info(`skips: ${statusCount(constants.ProcessingStatus.Skipped)}`)
    logger.info(`jobs created: ${tagCount('job_created')}`)
    logger.info(`resource bookings created: ${tagCount('resource_booking_created')}`)
    logger.info(`jobs already exist: ${tagCount('job_already_exists')}`)
    logger.info(`resource bookings already exist: ${tagCount('resource_booking_already_exists')}`)
    logger.info(`validation errors: ${tagCount('validation_error')}`)
    logger.info(`user not found: ${tagCount('user_not_found')}`)
    logger.info(`external id missing: ${tagCount('external_id_missing')}`)
    logger.info(`request error: ${tagCount('request_error')}`)
    logger.info(`internal error: ${tagCount('internal_error')}`)
    logger.info('=== summary ===')
  }
}
module.exports = Report
<file_sep>module.exports = {
LOG_LEVEL: process.env.LOG_LEVEL || 'info',
AUTH0_URL: 'http://example.com',
AUTH0_AUDIENCE: 'http://example.com',
AUTH0_AUDIENCE_UBAHN: 'http://example.com',
AUTH0_CLIENT_ID: 'fake_id',
AUTH0_CLIENT_SECRET: 'fake_secret'
}
<file_sep>/**
* Reindex JobCandidates data in Elasticsearch using data from database
*/
const config = require('config')
const { Interview } = require('../../src/models')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// optional CLI arg: a single jobCandidate id; null means reindex everything
const jobCandidateId = helper.getParamFromCliArgs()
const index = config.get('esConfig.ES_INDEX_JOB_CANDIDATE')
const reIndexAllJobCandidatesPrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the index ${index}?`
const reIndexJobCandidatePrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the document with id ${jobCandidateId} in index ${index}?`
// include nested interviews when loading jobCandidates from the DB
const jobCandidateModelOpts = {
  modelName: 'JobCandidate',
  include: [{
    model: Interview,
    as: 'interviews'
  }]
}
// Reindex all jobCandidates (bulk) or a single one (by id) into ES,
// after an interactive confirmation prompt. Exits the process when done.
async function reIndexJobCandidates () {
  if (jobCandidateId === null) {
    await helper.promptUser(reIndexAllJobCandidatesPrompt, async () => {
      try {
        await helper.indexBulkDataToES(jobCandidateModelOpts, index, logger)
        process.exit(0)
      } catch (err) {
        logger.logFullError(err, { component: 'reIndexJobCandidates' })
        process.exit(1)
      }
    })
  } else {
    await helper.promptUser(reIndexJobCandidatePrompt, async () => {
      try {
        await helper.indexDataToEsById(jobCandidateId, jobCandidateModelOpts, index, logger)
        process.exit(0)
      } catch (err) {
        logger.logFullError(err, { component: 'reIndexJobCandidates' })
        process.exit(1)
      }
    })
  }
}
// script entry point
reIndexJobCandidates()
<file_sep>/*
* Provide some commonly used functions for scripts.
*/
const csv = require('csv-parser')
const fs = require('fs')
const request = require('superagent')
/**
 * Load CSV data from file.
 *
 * Each parsed row is augmented with `_lnum`, its 1-based data-row number
 * (header row excluded).
 *
 * @param {String} pathname the pathname for the file
 * @param {Object} fieldNameMap mapping values of headers
 * @returns {Array} the result jobs data
 */
async function loadCSVFromFile (pathname, fieldNameMap = {}) {
  let lnum = 1
  const result = []
  return new Promise((resolve, reject) => {
    fs.createReadStream(pathname)
      .pipe(csv({
        // rename CSV headers according to fieldNameMap; keep unknown headers as-is
        mapHeaders: ({ header }) => fieldNameMap[header] || header
      }))
      .on('data', (data) => {
        result.push({ ...data, _lnum: lnum })
        lnum += 1
      })
      .on('error', err => reject(err))
      .on('end', () => resolve(result))
  })
}
/**
* Get pathname from command line arguments.
*
* @returns {String} the pathname
*/
function getPathnameFromCommandline () {
if (process.argv.length < 3) {
throw new Error('pathname for the csv file is required')
}
const pathname = process.argv[2]
if (!fs.existsSync(pathname)) {
throw new Error(`pathname: ${pathname} path not exist`)
}
if (!fs.lstatSync(pathname).isFile()) {
throw new Error(`pathname: ${pathname} path is not a regular file`)
}
return pathname
}
/**
 * Sleep for a given number of milliseconds.
 *
 * @param {Number} milliseconds the sleep time
 * @returns {undefined} resolves once the timeout has elapsed
 */
async function sleep (milliseconds) {
  return new Promise((resolve) => {
    setTimeout(resolve, milliseconds)
  })
}
/**
 * Find taas job by external id.
 *
 * Performs a GET /jobs request against the TaaS API and returns the first match.
 *
 * @param {String} token the auth token
 * @param {String} taasApiUrl url for TaaS API
 * @param {String} externalId the external id
 * @returns {Object} the result
 * @throws {Error} when no job with the given externalId exists
 */
async function getJobByExternalId (token, taasApiUrl, externalId) {
  const { body: jobs } = await request.get(`${taasApiUrl}/jobs`)
    .query({ externalId })
    .set('Authorization', `Bearer ${token}`)
  if (!jobs.length) {
    throw new Error(`externalId: ${externalId} job not found`)
  }
  // externalId is expected to match a single job; take the first hit
  return jobs[0]
}
// shared helpers for the CLI import scripts
module.exports = {
  loadCSVFromFile,
  getPathnameFromCommandline,
  sleep,
  getJobByExternalId
}
<file_sep>'use strict';
const config = require('config')
/**
* Migrate JobCandidate status - from rejected to rejected - other.
*/
// migration: rename the 'rejected' jobCandidate status to 'rejected - other'
module.exports = {
  up: async (queryInterface, Sequelize) => {
    const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
    await queryInterface.sequelize.query(
      `UPDATE ${tableName} SET status = 'rejected - other' WHERE status = 'rejected'`
    )
  },
  down: async (queryInterface, Sequelize) => {
    const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
    await queryInterface.sequelize.query(
      `UPDATE ${tableName} SET status = 'rejected' WHERE status = 'rejected - other'`
    )
  }
};
<file_sep>const config = require('config')
/*
* Replace endData with duration in Job model.
*/
// migration: replace jobs.end_date with a derived jobs.duration (days);
// each direction runs add-column / backfill / drop-column in one transaction
module.exports = {
  up: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.addColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'duration',
        { type: Sequelize.INTEGER },
        { transaction })
      // backfill duration as whole days between start_date and end_date
      await queryInterface.sequelize.query(`UPDATE ${config.DB_SCHEMA_NAME}.jobs SET duration = DATE_PART('day', end_date - start_date)`,
        { transaction })
      await queryInterface.removeColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'end_date',
        { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  },
  down: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.addColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'end_date',
        { type: Sequelize.DATE },
        { transaction })
      // reconstruct end_date from start_date + duration (missing duration counts as 0)
      await queryInterface.sequelize.query(`UPDATE ${config.DB_SCHEMA_NAME}.jobs SET end_date = start_date + COALESCE(duration,0) * INTERVAL '1 day'`,
        { transaction })
      await queryInterface.removeColumn({ tableName: 'jobs', schema: config.DB_SCHEMA_NAME }, 'duration',
        { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  }
}
<file_sep>/**
* Contains interview routes
*/
const constants = require('../../app-constants')
// route table for the Interview endpoints; all routes require a JWT
// plus either the operation-specific scope or the catch-all ALL_INTERVIEW scope
module.exports = {
  // request a new interview for a job candidate
  '/jobCandidates/:jobCandidateId/requestInterview': {
    patch: {
      controller: 'InterviewController',
      method: 'requestInterview',
      auth: 'jwt',
      scopes: [constants.Scopes.CREATE_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  },
  // patch an interview addressed by jobCandidate + round
  '/jobCandidates/:jobCandidateId/updateInterview/:round': {
    patch: {
      controller: 'InterviewController',
      method: 'partiallyUpdateInterviewByRound',
      auth: 'jwt',
      scopes: [constants.Scopes.UPDATE_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  },
  // patch an interview addressed by its id (or x.ai meeting id)
  '/updateInterview/:id': {
    patch: {
      controller: 'InterviewController',
      method: 'partiallyUpdateInterviewById',
      auth: 'jwt',
      scopes: [constants.Scopes.UPDATE_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  },
  // list interviews of a job candidate
  '/jobCandidates/:jobCandidateId/interviews': {
    get: {
      controller: 'InterviewController',
      method: 'searchInterviews',
      auth: 'jwt',
      scopes: [constants.Scopes.READ_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  },
  // fetch one interview by jobCandidate + round
  '/jobCandidates/:jobCandidateId/interviews/:round': {
    get: {
      controller: 'InterviewController',
      method: 'getInterviewByRound',
      auth: 'jwt',
      scopes: [constants.Scopes.READ_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  },
  // fetch one interview by its id (or x.ai meeting id)
  '/getInterview/:id': {
    get: {
      controller: 'InterviewController',
      method: 'getInterviewById',
      auth: 'jwt',
      scopes: [constants.Scopes.READ_INTERVIEW, constants.Scopes.ALL_INTERVIEW]
    }
  }
}
<file_sep>/**
* This service provides operations of JobCandidate.
*/
const _ = require('lodash')
const Joi = require('joi')
const config = require('config')
const HttpStatus = require('http-status-codes')
const { Op } = require('sequelize')
const { v4: uuid } = require('uuid')
const { Scopes, UserRoles } = require('../../app-constants')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const JobService = require('./JobService')
const {
processCreate,
processUpdate,
processDelete
} = require('../esProcessors/JobCandidateProcessor')
const sequelize = models.sequelize
const NotificationSchedulerService = require('./NotificationsSchedulerService')
const JobCandidate = models.JobCandidate
const esClient = helper.getESClient()
/**
 * Check user permission for getting job candidate.
 *
 * Privileged users (manage permission, M2M, connect manager) pass immediately;
 * everyone else must be a member of the job's project.
 *
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} jobId the job id
 * @returns {undefined}
 */
async function _checkUserPermissionForGetJobCandidate (currentUser, jobId) {
  const isPrivileged = currentUser.hasManagePermission || currentUser.isMachine || currentUser.isConnectManager
  if (isPrivileged) {
    return
  }
  const job = await models.Job.findById(jobId)
  await helper.checkIsMemberOfProject(currentUser.userId, job.projectId)
}
/**
 * Returns field omit list, based on user access level.
 *
 * Users without interview access get 'interviews' stripped from jobCandidate
 * responses; everyone else receives the full object.
 *
 * @param {Object} currentUser the user who perform this operation.
 * @returns {Array} the field list to omit from the jobCandidate object
 */
function getJobCandidateOmitList (currentUser) {
  // regular users: only those with manage permission may see interviews
  if (!currentUser.isMachine) {
    return currentUser.hasManagePermission ? [] : ['interviews']
  }
  // M2M tokens: require an interview-read (or all-interview) scope
  const interviewsAllowedScopes = [Scopes.READ_INTERVIEW, Scopes.ALL_INTERVIEW]
  const canReadInterviews = Boolean(currentUser.scopes) && helper.checkIfExists(interviewsAllowedScopes, currentUser.scopes)
  return canReadInterviews ? [] : ['interviews']
}
/**
 * Get jobCandidate by id
 *
 * Reads from Elasticsearch first; falls back to the database on ES failure
 * (but NOT on NotFound/Forbidden, which are re-thrown).
 *
 * @param {Object} currentUser the user who perform this operation.
 * @param {String} id the jobCandidate id
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the jobCandidate
 */
async function getJobCandidate (currentUser, id, fromDb = false) {
  // fields the caller is not allowed to see (e.g. 'interviews')
  const omitList = getJobCandidateOmitList(currentUser)
  if (!fromDb) {
    try {
      const jobCandidate = await esClient.get({
        index: config.esConfig.ES_INDEX_JOB_CANDIDATE,
        id
      })
      await _checkUserPermissionForGetJobCandidate(currentUser, jobCandidate.body._source.jobId) // check user permisson
      const jobCandidateRecord = { id: jobCandidate.body._id, ...jobCandidate.body._source }
      return _.omit(jobCandidateRecord, omitList)
    } catch (err) {
      if (helper.isDocumentMissingException(err)) {
        throw new errors.NotFoundError(`id: ${id} "JobCandidate" not found`)
      }
      if (err.httpStatus === HttpStatus.FORBIDDEN) {
        throw err
      }
      // any other ES error: log and fall through to the DB lookup below
      logger.logFullError(err, { component: 'JobCandidateService', context: 'getJobCandidate' })
    }
  }
  logger.info({ component: 'JobCandidateService', context: 'getJobCandidate', message: 'try to query db for data' })
  // include interviews if user has permission
  const include = []
  const hasInterviewPermision = !_.includes(omitList, 'interviews')
  if (hasInterviewPermision) {
    include.push({ model: models.Interview, as: 'interviews' })
  }
  const jobCandidate = await JobCandidate.findById(id, include)
  await _checkUserPermissionForGetJobCandidate(currentUser, jobCandidate.jobId) // check user permission
  return jobCandidate.dataValues
}
// validation schema for `getJobCandidate`
getJobCandidate.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().guid().required(),
  fromDb: Joi.boolean()
}).required()
/**
 * Create jobCandidate
 *
 * Persists the new record and indexes it into ES inside one DB transaction,
 * then posts the "jobCandidate create" Kafka event.
 *
 * @params {Object} currentUser the user who perform this operation
 * @params {Object} jobCandidate the jobCandidate to be created
 * @returns {Object} the created jobCandidate
 * @throws {ForbiddenError} when the user lacks manage permission and is not M2M
 */
async function createJobCandidate (currentUser, jobCandidate) {
  // check user permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  await helper.ensureJobById(jobCandidate.jobId) // ensure job exists
  await helper.ensureUserById(jobCandidate.userId) // ensure user exists
  jobCandidate.id = uuid()
  jobCandidate.createdBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    // DB + ES are kept in sync inside the transaction; ES failure rolls back the insert
    await sequelize.transaction(async (t) => {
      const created = await JobCandidate.create(jobCandidate, { transaction: t })
      entity = created.toJSON()
      await processCreate(entity)
    })
  } catch (e) {
    if (entity) {
      // ES processing failed after the insert - report for reconciliation (not awaited)
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'jobcandidate.create')
    }
    throw e
  }
  await helper.postEvent(config.TAAS_JOB_CANDIDATE_CREATE_TOPIC, entity)
  return entity
}
// Joi validation schema for createJobCandidate parameters
createJobCandidate.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  jobCandidate: Joi.object().keys({
    status: Joi.jobCandidateStatus().default('open'),
    jobId: Joi.string().uuid().required(),
    userId: Joi.string().uuid().required(),
    externalId: Joi.string().allow(null),
    resume: Joi.string().uri().allow(null),
    remark: Joi.stringAllowEmpty().allow(null)
  }).required()
}).required()
/**
 * Update jobCandidate
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the jobCandidate id
 * @param {Object} data the data to be updated
 * @returns {Object} the updated jobCandidate
 */
async function updateJobCandidate (currentUser, id, data) {
  const jobCandidate = await JobCandidate.findById(id)
  // snapshot the pre-update state so consumers receive it as oldValue
  const oldValue = jobCandidate.toJSON()
  const userId = await helper.getUserId(currentUser.userId)
  // check user permission: non-managers must be members of the job's project
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    const job = await models.Job.findById(jobCandidate.jobId)
    await helper.checkIsMemberOfProject(currentUser.userId, job.projectId)
  }
  data.updatedBy = userId
  let entity
  try {
    // DB update and ES indexing happen inside one transaction so they stay in sync
    await sequelize.transaction(async (t) => {
      const updated = await jobCandidate.update(data, { transaction: t })
      entity = updated.toJSON()
      await processUpdate(entity)
    })
  } catch (e) {
    // entity is set only if the DB update succeeded, i.e. the ES step failed —
    // report the inconsistency to the error topic before rethrowing
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'jobcandidate.update')
    }
    throw e
  }
  await helper.postEvent(config.TAAS_JOB_CANDIDATE_UPDATE_TOPIC, entity, { oldValue: oldValue })
  const result = _.assign(jobCandidate.dataValues, data)
  return result
}
/**
 * Partially update jobCandidate by id
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the jobCandidate id
 * @param {Object} data the data to be updated
 * @returns {Object} the updated jobCandidate
 */
async function partiallyUpdateJobCandidate (currentUser, id, data) {
  // a partial update simply delegates to the generic update routine
  const updated = await updateJobCandidate(currentUser, id, data)
  return updated
}
// Joi validation schema for partiallyUpdateJobCandidate parameters
partiallyUpdateJobCandidate.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: Joi.object().keys({
    status: Joi.jobCandidateStatus(),
    externalId: Joi.string().allow(null),
    viewedByCustomer: Joi.boolean().allow(null),
    resume: Joi.string().uri().allow(null),
    remark: Joi.stringAllowEmpty().allow(null)
  }).required()
}).required()
/**
 * Fully update jobCandidate by id
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the jobCandidate id
 * @param {Object} data the data to be updated
 * @returns {Object} the updated jobCandidate
 */
// a full update validates the referenced job and user before delegating to the
// generic update routine
async function fullyUpdateJobCandidate (currentUser, id, data) {
  await helper.ensureJobById(data.jobId) // ensure job exists
  await helper.ensureUserById(data.userId) // ensure user exists
  return updateJobCandidate(currentUser, id, data)
}
// Joi validation schema for fullyUpdateJobCandidate parameters; unlike the
// partial-update schema, jobId/userId are required and omitted optional fields
// are reset to their defaults
fullyUpdateJobCandidate.schema = Joi.object()
  .keys({
    currentUser: Joi.object().required(),
    id: Joi.string().uuid().required(),
    data: Joi.object()
      .keys({
        jobId: Joi.string().uuid().required(),
        userId: Joi.string().uuid().required(),
        status: Joi.jobCandidateStatus().default('open'),
        viewedByCustomer: Joi.boolean().allow(null),
        externalId: Joi.string().allow(null).default(null),
        resume: Joi.string().uri().allow('').allow(null).default(null),
        remark: Joi.stringAllowEmpty().allow(null)
      })
      .required()
  })
  .required()
/**
 * Delete jobCandidate by id
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the jobCandidate id
 */
async function deleteJobCandidate (currentUser, id) {
  // check user permission: only managers or M2M tokens may delete candidates
  if (!currentUser.hasManagePermission && !currentUser.isMachine) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
  const jobCandidate = await JobCandidate.findById(id)
  try {
    // DB delete and ES removal happen inside one transaction so they stay in sync
    await sequelize.transaction(async (t) => {
      await jobCandidate.destroy({ transaction: t })
      await processDelete({ id })
    })
  } catch (e) {
    // on failure report the attempted delete to the error topic before rethrowing
    helper.postErrorEvent(config.TAAS_ERROR_TOPIC, { id }, 'jobcandidate.delete')
    throw e
  }
  await helper.postEvent(config.TAAS_JOB_CANDIDATE_DELETE_TOPIC, { id })
}
// Joi validation schema for deleteJobCandidate parameters
deleteJobCandidate.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required()
}).required()
/**
 * List job candidates
 * @param {Object} currentUser the user who performs this operation
 * @param {Object} criteria the search criteria
 * @returns {Object} the search result, containing total/page/perPage and result array
 */
async function searchJobCandidates (currentUser, criteria) {
  // check user permission
  if (!currentUser.hasManagePermission && !currentUser.isMachine && !currentUser.isConnectManager) {
    if (!criteria.jobId) { // regular user can only search with filtering by "jobId"
      throw new errors.ForbiddenError('Not allowed without filtering by "jobId"')
    }
    await JobService.getJob(currentUser, criteria.jobId) // check whether user can access the job associated with the jobCandidate
  }
  // fields the current user is not allowed to see (e.g. 'interviews')
  const omitList = getJobCandidateOmitList(currentUser)
  const page = criteria.page > 0 ? criteria.page : 1
  const perPage = criteria.perPage > 0 ? criteria.perPage : 20
  if (!criteria.sortBy) {
    criteria.sortBy = 'id'
  }
  if (!criteria.sortOrder) {
    criteria.sortOrder = 'desc'
  }
  // search path 1: Elasticsearch; on any error fall back to the DB below
  try {
    const sort = [{ [criteria.sortBy === 'id' ? '_id' : criteria.sortBy]: { order: criteria.sortOrder } }]
    const esQuery = {
      index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'),
      body: {
        query: {
          bool: {
            must: [],
            // BUGFIX: "filter" must be initialized here — previously only "must"
            // existed, so pushing the "statuses" terms clause below threw a
            // TypeError and the ES path never worked when statuses were given
            filter: []
          }
        },
        from: (page - 1) * perPage,
        size: perPage,
        sort
      }
    }
    // exact-match filters for the simple criteria fields
    _.each(_.pick(criteria, ['jobId', 'userId', 'status', 'externalId']), (value, key) => {
      esQuery.body.query.bool.must.push({
        term: {
          [key]: {
            value
          }
        }
      })
    })
    // if criteria contains statuses, filter statuses with this value
    if (criteria.statuses && criteria.statuses.length > 0) {
      esQuery.body.query.bool.filter.push({
        terms: {
          status: criteria.statuses
        }
      })
    }
    logger.debug({ component: 'JobCandidateService', context: 'searchJobCandidates', message: `Query: ${JSON.stringify(esQuery)}` })
    const { body } = await esClient.search(esQuery)
    return {
      total: body.hits.total.value,
      page,
      perPage,
      result: _.map(body.hits.hits, (hit) => {
        const obj = _.cloneDeep(hit._source)
        obj.id = hit._id
        return _.omit(obj, omitList)
      })
    }
  } catch (err) {
    logger.logFullError(err, { component: 'JobCandidateService', context: 'searchJobCandidates' })
  }
  // search path 2: database fallback
  logger.info({ component: 'JobCandidateService', context: 'searchJobCandidates', message: 'fallback to DB query' })
  const filter = { [Op.and]: [] }
  _.each(_.pick(criteria, ['jobId', 'userId', 'status', 'externalId']), (value, key) => {
    filter[Op.and].push({ [key]: value })
  })
  if (criteria.statuses && criteria.statuses.length > 0) {
    filter[Op.and].push({ status: criteria.statuses })
  }
  // include interviews if user has permission
  const include = []
  const hasInterviewPermision = !_.includes(omitList, 'interviews')
  if (hasInterviewPermision) {
    include.push({ model: models.Interview, as: 'interviews' })
  }
  const jobCandidates = await JobCandidate.findAll({
    where: filter,
    include,
    offset: ((page - 1) * perPage),
    limit: perPage,
    order: [[criteria.sortBy, criteria.sortOrder]]
  })
  const total = await JobCandidate.count({ where: filter })
  return {
    fromDb: true,
    total,
    page,
    perPage,
    result: _.map(jobCandidates, jobCandidate => _.omit(jobCandidate.dataValues, omitList))
  }
}
// Joi validation schema for searchJobCandidates parameters
searchJobCandidates.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  criteria: Joi.object().keys({
    page: Joi.number().integer(),
    perPage: Joi.number().integer(),
    sortBy: Joi.string().valid('id', 'status'),
    sortOrder: Joi.string().valid('desc', 'asc'),
    jobId: Joi.string().uuid(),
    userId: Joi.string().uuid(),
    status: Joi.jobCandidateStatus(),
    statuses: Joi.array().items(Joi.jobCandidateStatus()),
    externalId: Joi.string()
  }).required()
}).required()
/**
 * Download jobCandidate resume
 * @param {Object} currentUser the user who performs this operation
 * @param {String} id the jobCandidate id
 * @returns {String} the resume URL
 */
async function downloadJobCandidateResume (currentUser, id) {
  const jobCandidate = await JobCandidate.findById(id)
  const { id: currentUserUserId } = await helper.getUserByExternalId(currentUser.userId)
  // customer role: when a customer (sole role "Topcoder User", not the candidate
  // themselves) views an unviewed resume, notify the candidate and flag the
  // record as viewed; failures here are logged but do not block the download
  if (!jobCandidate.viewedByCustomer && currentUserUserId !== jobCandidate.userId && currentUser.roles.length === 1 && currentUser.roles[0] === UserRoles.TopcoderUser) {
    try {
      const job = await models.Job.findById(jobCandidate.jobId)
      const { handle } = await helper.getUserById(jobCandidate.userId, true)
      await NotificationSchedulerService.sendNotification(currentUser, {
        template: 'taas.notification.job-candidate-resume-viewed',
        recipients: [{ handle }],
        data: {
          jobCandidateUserHandle: handle,
          jobName: job.title,
          description: 'Client Viewed Resume',
          notificationType: {
            jobCandidateResumeViewed: true
          }
        }
      })
      // mark as viewed so the notification is sent only once
      await updateJobCandidate(currentUser, jobCandidate.id, { viewedByCustomer: true })
    } catch (err) {
      logger.logFullError(err, { component: 'JobCandidateService', context: 'downloadJobCandidateResume' })
    }
  }
  return jobCandidate.resume
}
// Joi validation schema for downloadJobCandidateResume parameters
downloadJobCandidateResume.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required()
}).required()
// public API of the JobCandidate service
module.exports = {
  getJobCandidate,
  createJobCandidate,
  partiallyUpdateJobCandidate,
  fullyUpdateJobCandidate,
  deleteJobCandidate,
  searchJobCandidates,
  downloadJobCandidateResume
}
<file_sep>/*
* Handle events for JobCandidate.
*/
const { Op } = require('sequelize')
const _ = require('lodash')
const config = require('config')
const models = require('../models')
const logger = require('../common/logger')
const helper = require('../common/helper')
const JobService = require('../services/JobService')
const JobCandidateService = require('../services/JobCandidateService')
/**
* Once we create at least one JobCandidate for a Job, the Job status should be changed to in-review.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function inReviewJob (payload) {
  const job = await models.Job.findById(payload.value.jobId)
  // nothing to do if the job is already in review
  if (job.status === 'in-review') {
    logger.debug({
      component: 'JobCandidateEventHandler',
      context: 'inReviewJob',
      message: `id: ${job.id} job is already in in-review status`
    })
    return
  }
  // only an "open" candidate moves its job into review
  if (payload.value.status === 'open') {
    // use the M2M audit user so the status change is attributed to the system
    await JobService.partiallyUpdateJob(
      helper.getAuditM2Muser(),
      job.id,
      { status: 'in-review' }
    ).then(result => {
      logger.info({
        component: 'JobCandidateEventHandler',
        context: 'inReviewJob',
        message: `id: ${result.id} job got in-review status.`
      })
    })
  } else {
    logger.debug({
      component: 'JobCandidateEventHandler',
      context: 'inReviewJob',
      message: `id: ${payload.value.id} candidate is not in open status`
    })
  }
}
/**
* Actual Update Job Candidates
*
* @param {*} statuses the source status we'll update
* @param {*} userId the userID
*/
async function updateJobCandidates (statuses, userId) {
  logger.info({
    component: 'JobCandidateEventHandler',
    context: 'updateJobCandidates',
    message: `Update jobCandidates for user ${userId}`
  })
  // find all of this user's candidates in any of the given statuses
  const filter = { [Op.and]: [] }
  filter[Op.and].push({ status: statuses })
  filter[Op.and].push({ userId: userId })
  const candidates = await models.JobCandidate.findAll({
    where: filter
  })
  if (candidates && candidates.length > 0) {
    // BUGFIX: iterate with for..of so each update is actually awaited —
    // _.each does not await async callbacks, so the function previously
    // resolved before the updates finished and rejections went unhandled
    for (const candidate of candidates) {
      logger.info({
        component: 'JobCandidateEventHandler',
        context: 'updateJobCandidates',
        message: `Begin update id: ${candidate.id}' candidate with ${candidate.status} status into ${config.WITHDRAWN_STATUS_CHANGE_MAPPING[candidate.status]} for userId: ${userId}`
      })
      // map the current status to its withdrawn counterpart and persist it
      await JobCandidateService.partiallyUpdateJobCandidate(
        helper.getAuditM2Muser(),
        candidate.id,
        { status: config.WITHDRAWN_STATUS_CHANGE_MAPPING[candidate.status] }
      ).then(result => {
        logger.info({
          component: 'JobCandidateEventHandler',
          context: 'updateJobCandidates',
          message: `Finish update id: ${result.id}' candidate into ${result.status} status for userId: ${userId}`
        })
      })
    }
  } else {
    logger.info({
      component: 'JobCandidateEventHandler',
      context: 'updateJobCandidates',
      message: `There are not jobCandidates for user ${userId} that required to be updated.`
    })
  }
}
/**
* Update Job Candidates based on business rules
*
* @param {*} payload the updated jobCandidate info
*/
async function withDrawnJobCandidates (payload) {
  const jobCandidate = payload.value
  // only a candidate that just became "placed" can trigger withdrawals
  if (jobCandidate.status === 'placed') {
    const job = await models.Job.findById(payload.value.jobId)
    // only jobs above the configured weekly-hours threshold count as full engagements
    if (job.hoursPerWeek > config.JOBS_HOUR_PER_WEEK) {
      // find all these user's open job Candidate and mark the status as withdrawn or withdrawn-prescreen
      logger.info({
        component: 'JobCandidateEventHandler',
        context: 'withDrawnJobCandidates',
        message: `Begin update jobCandidates as ${payload.value.id} candidate's new gig is requiring more than 20 hrs per week`
      })
      await updateJobCandidates(['applied', 'skills-test', 'phone-screen', 'open', 'interview', 'selected', 'offered'], payload.value.userId)
      logger.info({
        component: 'JobCandidateEventHandler',
        context: 'withDrawnJobCandidates',
        message: `Finish update jobCandidates as ${payload.value.id} candidate`
      })
    } else {
      logger.debug({
        component: 'JobCandidateEventHandler',
        context: 'withDrawnJobCandidates',
        message: `id: ${payload.value.id} candidate is not placing on a gig requiring 20 hrs per week`
      })
    }
  }
}
/**
* Process job candidate create event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processCreate (payload) {
  // a new candidate may move its job into review
  await inReviewJob(payload)
  // dispatch status-specific side effects for the freshly created candidate
  switch (payload.value.status) {
    case 'placed':
      await withDrawnJobCandidates(payload)
      break
    case 'selected':
      await sendJobCandidateSelectedNotification(payload)
      break
    default:
      break
  }
}
/**
* Send job candidate selected notification.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function sendJobCandidateSelectedNotification (payload) {
  const jobCandidate = payload.value
  const job = await models.Job.findById(jobCandidate.jobId)
  const user = await helper.getUserById(jobCandidate.userId)
  const template = helper.getEmailTemplatesForKey('notificationEmailTemplates')['taas.notification.job-candidate-selected']
  const project = await helper.getProjectById({ isMachine: true }, job.projectId)
  // deep links used by both the email and the Slack message
  const jobUrl = `${config.TAAS_APP_URL}/${project.id}/positions/${job.id}`
  const rcrmJobUrl = `${config.RCRM_APP_URL}/job/${job.externalId}`
  const teamUrl = `${config.TAAS_APP_URL}/${project.id}`
  const data = {
    subject: template.subject,
    teamName: project.name,
    teamUrl,
    jobTitle: job.title,
    jobDuration: job.duration,
    jobStartDate: helper.formatDateEDT(job.startDate),
    userHandle: user.handle,
    jobUrl,
    rcrmJobUrl,
    notificationType: {
      candidateSelected: true
    },
    description: 'Job Candidate Selected'
  }
  // fill template placeholders in the subject with the data above
  data.subject = helper.substituteStringByObject(data.subject, data)
  const emailData = {
    serviceId: 'email',
    type: 'taas.notification.job-candidate-selected',
    details: {
      from: template.from,
      recipients: (template.recipients || []).map(email => ({ email })),
      data,
      sendgridTemplateId: template.sendgridTemplateId,
      version: 'v3'
    }
  }
  const slackData = {
    serviceId: 'slack',
    type: 'taas.notification.job-candidate-selected',
    details: {
      channel: config.NOTIFICATION_SLACK_CHANNEL,
      text: data.subject,
      blocks: [
        {
          type: 'section',
          text: {
            type: 'mrkdwn',
            text: '*:ballot_box_with_check: Job Candidate is Selected*'
          }
        },
        {
          type: 'section',
          text: {
            type: 'mrkdwn',
            text: [
              `*Team Name*: <${teamUrl}|${project.name}>`,
              `*Job Title*: <${jobUrl}|${job.title}> (<${rcrmJobUrl}|Open in RCRM>)`,
              `*Job Start Date*: ${helper.formatDateEDT(job.startDate)}`,
              `*Job Duration*: ${job.duration}`,
              `*Job Candidate*: ${user.handle}`
            ].join('\n')
          }
        }
      ]
    }
  }
  // a single event carries both the email and the Slack notification
  await helper.postEvent(config.NOTIFICATIONS_CREATE_TOPIC, {
    notifications: [emailData, slackData]
  })
  // BUGFIX: log job fields from "job" — payload.value is the job candidate,
  // which has no title/duration/startDate, so these previously logged undefined
  logger.debug({
    component: 'JobCandidateEventHandler',
    context: 'sendJobCandidateSelectedNotification',
    message: `teamName: ${project.name}, jobTitle: ${job.title}, jobDuration: ${job.duration}, jobStartDate: ${job.startDate}`
  })
}
/**
* Process job candidate update event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processUpdate (payload) {
  // an updated candidate may move its job into review
  await inReviewJob(payload)
  const newStatus = payload.value.status
  const previousStatus = payload.options.oldValue.status
  // trigger side effects only on a real transition into the status
  if (newStatus === 'placed' && previousStatus !== 'placed') {
    await withDrawnJobCandidates(payload)
  }
  if (newStatus === 'selected' && previousStatus !== 'selected') {
    await sendJobCandidateSelectedNotification(payload)
  }
}
// event handlers consumed by the Kafka processor
module.exports = {
  processCreate,
  processUpdate
}
<file_sep>/**
* App constants
*/
// role names as they appear in the JWT "roles" claim
const UserRoles = {
  BookingManager: 'bookingmanager',
  Administrator: 'administrator',
  ConnectManager: 'Connect Manager',
  TopcoderUser: 'Topcoder User'
}
// roles granted full manage permission across the API
const FullManagePermissionRoles = [
  UserRoles.BookingManager,
  UserRoles.Administrator
]
// M2M auth scopes, grouped by resource; "all:" grants every operation on a resource
const Scopes = {
  // job
  READ_JOB: 'read:taas-jobs',
  CREATE_JOB: 'create:taas-jobs',
  UPDATE_JOB: 'update:taas-jobs',
  DELETE_JOB: 'delete:taas-jobs',
  ALL_JOB: 'all:taas-jobs',
  // job candidate
  READ_JOB_CANDIDATE: 'read:taas-jobCandidates',
  CREATE_JOB_CANDIDATE: 'create:taas-jobCandidates',
  UPDATE_JOB_CANDIDATE: 'update:taas-jobCandidates',
  DELETE_JOB_CANDIDATE: 'delete:taas-jobCandidates',
  ALL_JOB_CANDIDATE: 'all:taas-jobCandidates',
  // resource booking
  READ_RESOURCE_BOOKING: 'read:taas-resourceBookings',
  CREATE_RESOURCE_BOOKING: 'create:taas-resourceBookings',
  UPDATE_RESOURCE_BOOKING: 'update:taas-resourceBookings',
  DELETE_RESOURCE_BOOKING: 'delete:taas-resourceBookings',
  ALL_RESOURCE_BOOKING: 'all:taas-resourceBookings',
  // taas-team
  READ_TAAS_TEAM: 'read:taas-teams',
  CREATE_ROLE_SEARCH_REQUEST: 'create:taas-roleSearchRequests',
  CREATE_TAAS_TEAM: 'create:taas-teams',
  // work period
  READ_WORK_PERIOD: 'read:taas-workPeriods',
  CREATE_WORK_PERIOD: 'create:taas-workPeriods',
  UPDATE_WORK_PERIOD: 'update:taas-workPeriods',
  DELETE_WORK_PERIOD: 'delete:taas-workPeriods',
  ALL_WORK_PERIOD: 'all:taas-workPeriods',
  // work period payment
  READ_WORK_PERIOD_PAYMENT: 'read:taas-workPeriodPayments',
  CREATE_WORK_PERIOD_PAYMENT: 'create:taas-workPeriodPayments',
  UPDATE_WORK_PERIOD_PAYMENT: 'update:taas-workPeriodPayments',
  ALL_WORK_PERIOD_PAYMENT: 'all:taas-workPeriodPayments',
  // interview
  READ_INTERVIEW: 'read:taas-interviews',
  CREATE_INTERVIEW: 'create:taas-interviews',
  UPDATE_INTERVIEW: 'update:taas-interviews',
  ALL_INTERVIEW: 'all:taas-interviews',
  // role
  READ_ROLE: 'read:taas-roles',
  CREATE_ROLE: 'create:taas-roles',
  UPDATE_ROLE: 'update:taas-roles',
  DELETE_ROLE: 'delete:taas-roles',
  ALL_ROLE: 'all:taas-roles'
}
// Interview related constants
const Interviews = {
  Status: {
    Scheduling: 'Scheduling',
    Scheduled: 'Scheduled',
    RequestedForReschedule: 'Requested for reschedule',
    Rescheduled: 'Rescheduled',
    Completed: 'Completed',
    Cancelled: 'Cancelled'
  },
  // key: template name in x.ai, value: duration
  XaiTemplate: {
    'interview-30': 30,
    'interview-60': 60
  },
  // maximum number of interview rounds per job candidate
  MaxAllowedCount: 3
}
// lifecycle statuses of a Topcoder challenge used for payments
const ChallengeStatus = {
  DRAFT: 'Draft',
  ACTIVE: 'Active',
  COMPLETED: 'Completed'
}
/**
 * Aggregate payment status for Work Period which is determined
 * based on the payments the Work Period has using `PaymentStatusRules`
 */
const AggregatePaymentStatus = {
  PENDING: 'pending',
  IN_PROGRESS: 'in-progress',
  PARTIALLY_COMPLETED: 'partially-completed',
  COMPLETED: 'completed',
  NO_DAYS: 'no-days'
}
/**
 * `WorkPeriodPayment.status` - possible values
 */
const WorkPeriodPaymentStatus = {
  COMPLETED: 'completed',
  SCHEDULED: 'scheduled',
  IN_PROGRESS: 'in-progress',
  FAILED: 'failed',
  CANCELLED: 'cancelled'
}
/**
 * The rules how to determine WorkPeriod.paymentStatus based on the payments
 *
 * The top rule has priority over the bottom rules.
 */
const PaymentStatusRules = [
  { paymentStatus: AggregatePaymentStatus.IN_PROGRESS, condition: { hasWorkPeriodPaymentStatus: [WorkPeriodPaymentStatus.SCHEDULED, WorkPeriodPaymentStatus.IN_PROGRESS] } },
  { paymentStatus: AggregatePaymentStatus.COMPLETED, condition: { hasWorkPeriodPaymentStatus: [WorkPeriodPaymentStatus.COMPLETED], hasDueDays: false } },
  { paymentStatus: AggregatePaymentStatus.PARTIALLY_COMPLETED, condition: { hasWorkPeriodPaymentStatus: [WorkPeriodPaymentStatus.COMPLETED], hasDueDays: true } },
  { paymentStatus: AggregatePaymentStatus.PENDING, condition: { hasDueDays: true } },
  { paymentStatus: AggregatePaymentStatus.NO_DAYS, condition: { daysWorked: 0 } }
]
/**
 * The WorkPeriodPayment.status values which we take into account when calculate
 * aggregate values inside WorkPeriod:
 * - daysPaid
 * - paymentTotal
 * - paymentStatus
 */
const ActiveWorkPeriodPaymentStatuses = [
  WorkPeriodPaymentStatus.SCHEDULED,
  WorkPeriodPaymentStatus.IN_PROGRESS,
  WorkPeriodPaymentStatus.COMPLETED
]
// statuses accepted when updating a work period payment
const WorkPeriodPaymentUpdateStatus = {
  SCHEDULED: 'scheduled',
  CANCELLED: 'cancelled'
}
// feature switch: enable/disable actual payment processing
const PaymentProcessingSwitch = {
  ON: 'ON',
  OFF: 'OFF'
}
// feature switch: enable/disable the weekly survey
const WeeklySurveySwitch = {
  ON: 'ON',
  OFF: 'OFF'
}
// steps of the payment scheduler state machine
const PaymentSchedulerStatus = {
  START_PROCESS: 'start-process',
  CREATE_CHALLENGE: 'create-challenge',
  ASSIGN_MEMBER: 'assign-member',
  ACTIVATE_CHALLENGE: 'activate-challenge',
  GET_USER_ID: 'get-userId',
  CLOSE_CHALLENGE: 'close-challenge'
}
// job statuses referenced by name elsewhere in the app
const JobStatus = {
  OPEN: 'open'
}
// job candidate statuses referenced by name elsewhere in the app
const JobCandidateStatus = {
  INTERVIEW: 'interview'
}
// all application constants exposed to the rest of the codebase
module.exports = {
  UserRoles,
  FullManagePermissionRoles,
  Scopes,
  Interviews,
  ChallengeStatus,
  AggregatePaymentStatus,
  WorkPeriodPaymentStatus,
  WorkPeriodPaymentUpdateStatus,
  PaymentSchedulerStatus,
  PaymentProcessingSwitch,
  PaymentStatusRules,
  WeeklySurveySwitch,
  ActiveWorkPeriodPaymentStatuses,
  JobStatus,
  JobCandidateStatus
}
<file_sep>/**
* This service provides operations of Roles.
*/
const _ = require('lodash')
const config = require('config')
const Joi = require('joi')
const { Op } = require('sequelize')
const uuid = require('uuid')
const helper = require('../common/helper')
const logger = require('../common/logger')
const errors = require('../common/errors')
const models = require('../models')
const {
processCreate,
processUpdate,
processDelete
} = require('../esProcessors/RoleProcessor')
// shared Sequelize connection used for transactions
const sequelize = models.sequelize
// Sequelize model for roles
const Role = models.Role
// shared Elasticsearch client
const esClient = helper.getESClient()
/**
* Check user permission for deleting, creating or updating role.
* @param {Object} currentUser the user who perform this operation.
* @returns {undefined}
*/
async function _checkUserPermissionForWriteDeleteRole (currentUser) {
  // writes are allowed for users with manage permission or M2M machine tokens
  const isAllowed = Boolean(currentUser.hasManagePermission) || Boolean(currentUser.isMachine)
  if (!isAllowed) {
    throw new errors.ForbiddenError('You are not allowed to perform this action!')
  }
}
/**
* Cleans and validates skill names using skills service
* @param {Array<string>} skills array of skill names to validate
* @returns {undefined}
*/
async function _cleanAndValidateSkillNames (skills) {
  // remove duplicates, leading and trailing whitespaces, empties.
  const cleanedSkills = _.uniq(_.filter(_.map(skills, skill => _.trim(skill)), skill => !_.isEmpty(skill)))
  if (cleanedSkills.length > 0) {
    // search skills if they are exists
    const result = await helper.getAllTopcoderSkills({ name: _.join(cleanedSkills, ',') })
    const skillNames = _.map(result, 'name')
    // find skills that not valid (compare case-insensitively)
    const unValidSkills = _.differenceBy(cleanedSkills, skillNames, _.toLower)
    if (unValidSkills.length > 0) {
      throw new errors.BadRequestError(`skills: "${unValidSkills}" are not valid`)
    }
    // return the canonical (service-provided) casing of the matched names
    return _.intersectionBy(skillNames, cleanedSkills, _.toLower)
  } else {
    // nothing left after cleaning
    return null
  }
}
/**
 * Ensure no existing Role has the same (case-insensitive) name.
 * @param {String} roleName the role name to check
 * @throws {BadRequestError} if a role with the same name already exists
 */
async function _checkIfSameNamedRoleExists (roleName) {
  // We can't create another Role with the same name (case-insensitive match)
  const role = await Role.findOne({
    where: {
      name: { [Op.iLike]: roleName }
    },
    raw: true
  })
  if (role) {
    // BUGFIX: error message grammar — was `is already exists.`
    throw new errors.BadRequestError(`Role: "${role.name}" already exists.`)
  }
}
/**
 * Get role by id
 * @param {String} id the role id
 * @param {Boolean} fromDb flag if query db for data or not
 * @returns {Object} the role
 */
async function getRole (id, fromDb = false) {
  if (!fromDb) {
    // read path 1: Elasticsearch, unless the caller explicitly asked for DB
    try {
      const role = await esClient.get({
        index: config.esConfig.ES_INDEX_ROLE,
        id
      })
      return { id: role.body._id, ...role.body._source }
    } catch (err) {
      // a missing ES document means the role does not exist at all
      if (helper.isDocumentMissingException(err)) {
        throw new errors.NotFoundError(`id: ${id} "Role" not found`)
      }
      // other ES errors fall through to the DB query below
    }
  }
  // read path 2: database fallback
  logger.info({ component: 'RoleService', context: 'getRole', message: 'try to query db for data' })
  const role = await Role.findById(id)
  return role.toJSON()
}
// Joi validation schema for getRole parameters
getRole.schema = Joi.object().keys({
  id: Joi.string().uuid().required(),
  fromDb: Joi.boolean()
}).required()
/**
* Create role
* @param {Object} currentUser the user who perform this operation
* @param {Object} role the role to be created
* @returns {Object} the created role
*/
async function createRole (currentUser, role) {
  // check permission
  await _checkUserPermissionForWriteDeleteRole(currentUser)
  // check if another Role with the same name exists.
  await _checkIfSameNamedRoleExists(role.name)
  // clean and validate skill names
  if (role.listOfSkills) {
    role.listOfSkills = await _cleanAndValidateSkillNames(role.listOfSkills)
  }
  role.id = uuid.v4()
  role.createdBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    // DB insert and ES indexing happen inside one transaction so they stay in sync
    await sequelize.transaction(async (t) => {
      const created = await Role.create(role, { transaction: t })
      entity = created.toJSON()
      await processCreate(entity)
    })
  } catch (e) {
    // entity is set only if the DB insert succeeded, i.e. the ES step failed —
    // report the inconsistency to the error topic before rethrowing
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'role.create')
    }
    throw e
  }
  await helper.postEvent(config.TAAS_ROLE_CREATE_TOPIC, entity)
  return entity
}
// Joi validation schema for createRole parameters
createRole.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  role: Joi.object().keys({
    name: Joi.string().max(50).required(),
    description: Joi.string().max(1000),
    listOfSkills: Joi.array().items(Joi.string().max(50).required()),
    rates: Joi.array().items(Joi.object().keys({
      global: Joi.smallint().min(1),
      inCountry: Joi.smallint().min(1),
      offShore: Joi.smallint().min(1),
      niche: Joi.smallint().min(1),
      rate30Niche: Joi.smallint().min(1),
      rate30Global: Joi.smallint().min(1),
      rate30InCountry: Joi.smallint().min(1),
      rate30OffShore: Joi.smallint().min(1),
      rate20Niche: Joi.smallint().min(1),
      rate20Global: Joi.smallint().min(1),
      rate20InCountry: Joi.smallint().min(1),
      rate20OffShore: Joi.smallint().min(1)
    }).required()).required(),
    numberOfMembers: Joi.number().integer().min(1),
    numberOfMembersAvailable: Joi.smallint().min(1),
    imageUrl: Joi.string().uri().max(255),
    timeToCandidate: Joi.smallint().min(1),
    timeToInterview: Joi.smallint().min(1)
  }).required()
}).required()
/**
* Partially Update role
* @param {Object} currentUser the user who perform this operation
* @param {String} id the role id
* @param {Object} data the data to be updated
* @returns {Object} the updated role
*/
async function updateRole (currentUser, id, data) {
  // check permission
  await _checkUserPermissionForWriteDeleteRole(currentUser)
  const role = await Role.findById(id)
  const oldValue = role.toJSON()
  // if name is changed, check if another Role with the same name exists.
  if (data.name && data.name.toLowerCase() !== role.dataValues.name.toLowerCase()) {
    await _checkIfSameNamedRoleExists(data.name)
  }
  // clean and validate skill names
  if (data.listOfSkills) {
    data.listOfSkills = await _cleanAndValidateSkillNames(data.listOfSkills)
  }
  data.updatedBy = await helper.getUserId(currentUser.userId)
  let entity
  try {
    // DB update and ES indexing happen inside one transaction so they stay in sync
    await sequelize.transaction(async (t) => {
      const updated = await role.update(data, { transaction: t })
      entity = updated.toJSON()
      await processUpdate(entity)
    })
  } catch (e) {
    if (entity) {
      helper.postErrorEvent(config.TAAS_ERROR_TOPIC, entity, 'role.update')
    }
    throw e
  }
  // BUGFIX: post to the role update topic — this previously posted to
  // TAAS_RESOURCE_BOOKING_UPDATE_TOPIC (copy-paste from another service),
  // while createRole/deleteRole use TAAS_ROLE_CREATE_TOPIC/TAAS_ROLE_DELETE_TOPIC
  await helper.postEvent(config.TAAS_ROLE_UPDATE_TOPIC, entity, { oldValue: oldValue })
  return entity
}
// Joi validation schema for updateRole parameters (partial update: all fields optional)
updateRole.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required(),
  data: Joi.object().keys({
    name: Joi.string().max(50),
    description: Joi.string().max(1000).allow(null),
    listOfSkills: Joi.array().items(Joi.string().max(50).required()).allow(null),
    rates: Joi.array().items(Joi.object().keys({
      global: Joi.smallint().min(1).required(),
      inCountry: Joi.smallint().min(1).required(),
      offShore: Joi.smallint().min(1).required(),
      niche: Joi.smallint().min(1),
      rate30Niche: Joi.smallint().min(1),
      rate30Global: Joi.smallint().min(1),
      rate30InCountry: Joi.smallint().min(1),
      rate30OffShore: Joi.smallint().min(1),
      rate20Global: Joi.smallint().min(1),
      rate20Niche: Joi.smallint().min(1),
      rate20InCountry: Joi.smallint().min(1),
      rate20OffShore: Joi.smallint().min(1)
    }).required()),
    numberOfMembers: Joi.number().integer().min(1).allow(null),
    numberOfMembersAvailable: Joi.smallint().min(1).allow(null),
    imageUrl: Joi.string().uri().max(255).allow(null),
    timeToCandidate: Joi.smallint().min(1).allow(null),
    timeToInterview: Joi.smallint().min(1).allow(null)
  }).required()
}).required()
/**
* Delete role by id
* @param {Object} currentUser the user who perform this operation
* @param {String} id the role id
*/
async function deleteRole (currentUser, id) {
  // check permission
  await _checkUserPermissionForWriteDeleteRole(currentUser)
  const role = await Role.findById(id)
  try {
    // DB delete and ES removal happen inside one transaction so they stay in sync
    await sequelize.transaction(async (t) => {
      await role.destroy({ transaction: t })
      await processDelete({ id })
    })
  } catch (e) {
    // report the attempted delete to the error topic before rethrowing
    helper.postErrorEvent(config.TAAS_ERROR_TOPIC, { id }, 'role.delete')
    throw e
  }
  await helper.postEvent(config.TAAS_ROLE_DELETE_TOPIC, { id })
}
// Joi validation schema for deleteRole parameters
deleteRole.schema = Joi.object().keys({
  currentUser: Joi.object().required(),
  id: Joi.string().uuid().required()
}).required()
/**
 * List roles
 * @param {Object} criteria the search criteria
 * @returns {Object} the search result
 */
async function searchRoles (criteria) {
  // clean skill names and convert into an array
  criteria.skillsList = _.filter(_.map(_.split(criteria.skillsList, ','), skill => _.trim(skill)), skill => !_.isEmpty(skill))
  // search path 1: Elasticsearch; on any error fall back to the DB below
  try {
    const esQuery = {
      index: config.get('esConfig.ES_INDEX_ROLE'),
      body: {
        query: {
          bool: {
            must: []
          }
        },
        size: 10000,
        sort: [{ name: { order: 'asc' } }]
      }
    }
    // Apply skill name filters. listOfSkills array should include all skills provided in criteria.
    _.each(criteria.skillsList, skill => {
      esQuery.body.query.bool.must.push({
        term: {
          listOfSkills: skill
        }
      })
    })
    // Apply name filter, allow partial match
    if (criteria.keyword) {
      esQuery.body.query.bool.must.push({
        wildcard: {
          name: `*${criteria.keyword}*`
        }
      })
    }
    logger.debug({ component: 'RoleService', context: 'searchRoles', message: `Query: ${JSON.stringify(esQuery)}` })
    const { body } = await esClient.search(esQuery)
    return _.map(body.hits.hits, (hit) => _.assign(hit._source, { id: hit._id }))
  } catch (err) {
    logger.logFullError(err, { component: 'RoleService', context: 'searchRoles' })
  }
  // search path 2: database fallback
  logger.info({ component: 'RoleService', context: 'searchRoles', message: 'fallback to DB query' })
  const filter = { [Op.and]: [] }
  // Apply skill name filters. listOfSkills array should include all skills provided in criteria.
  if (criteria.skillsList) {
    _.each(criteria.skillsList, skill => {
      filter[Op.and].push(models.Sequelize.literal(`LOWER('${skill}') in (SELECT lower(x) FROM unnest("list_of_skills"::text[]) x)`))
    })
  }
  // Apply name filter, allow partial match and ignore case
  if (criteria.keyword) {
    filter[Op.and].push({ name: { [Op.iLike]: `%${criteria.keyword}%` } })
  }
  const queryCriteria = {
    where: filter,
    order: [['name', 'asc']]
  }
  const roles = await Role.findAll(queryCriteria)
  return roles
}
// Validation schema for searchRoles parameters.
searchRoles.schema = Joi.object({
  criteria: Joi.object({
    skillsList: Joi.string(),
    keyword: Joi.string()
  }).required()
}).required()
// Exposed Role service operations.
module.exports = {
  getRole,
  createRole,
  updateRole,
  deleteRole,
  searchRoles
}
<file_sep>'use strict';
const config = require('config')
/**
 * Migrate JobCandidate status - from shortlist to interview.
 */
module.exports = {
up: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'interview' WHERE status = 'shortlist'`)
},
down: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'shortlist' WHERE status = 'interview'`)
}
};
<file_sep>/*
* The Report class.
*/
const logger = require('./logger')
const constants = require('./constants')
const _ = require('lodash')
/*
 * The Report class.
 *
 * Collects per-line processing messages — each with a line number (`lnum`),
 * a ProcessingStatus and a list of tagged info entries — and prints them
 * individually or as an aggregated summary.
 */
class Report {
  constructor () {
    this.messages = []
  }

  // append a message to the report
  add (message) {
    this.messages.push(message)
  }

  // print the last message to the console at a log level matching its status;
  // a no-op when nothing has been added yet (previously this would crash
  // reading `.status` of undefined)
  print () {
    const lastMessage = this.messages[this.messages.length - 1]
    if (!lastMessage) {
      return
    }
    const output = `#${lastMessage.lnum} - ${_.map(lastMessage.info, 'text').join('; ')}`
    if (lastMessage.status === constants.ProcessingStatus.Skipped) {
      logger.warn(output)
    } else if (lastMessage.status === constants.ProcessingStatus.Successful) {
      logger.info(output)
    } else if (lastMessage.status === constants.ProcessingStatus.Failed) {
      logger.error(output)
    }
  }

  // print a summary to the console
  printSummary () {
    const groups = _.groupBy(this.messages, 'status')
    const groupsByTag = _.groupBy(_.flatten(_.map(this.messages, message => message.info)), 'tag')
    // summarize total fails
    const failure = groups[constants.ProcessingStatus.Failed] || []
    // summarize total skips
    const skips = groups[constants.ProcessingStatus.Skipped] || []
    // summarize total jobs with isApplicationPageActive being set to true/false
    const groupsByisApplicationPageActive = _.groupBy(groupsByTag.job_is_application_page_active_updated, 'currentValue')
    const jobsWithIsApplicationPageActiveSetToTrue = groupsByisApplicationPageActive.true || []
    const jobsWithIsApplicationPageActiveSetToFalse = groupsByisApplicationPageActive.false || []
    // summarize total records with externalId not found in Taas API
    const recordsWithExternalIdNotFound = groupsByTag.external_id_not_found || []
    logger.info('=== summary ===')
    logger.info(`No. of records read = ${this.messages.length}`)
    logger.info(`No. of records updated for field isApplicationPageActive = true = ${jobsWithIsApplicationPageActiveSetToTrue.length}`)
    logger.info(`No. of records updated for field isApplicationPageActive = false = ${jobsWithIsApplicationPageActiveSetToFalse.length}`)
    logger.info(`No. of records : externalId not found = ${recordsWithExternalIdNotFound.length}`)
    logger.info(`No. of records failed(all) = ${failure.length}`)
    logger.info(`No. of records failed(excluding "externalId not found") = ${failure.length - recordsWithExternalIdNotFound.length}`)
    logger.info(`No. of records skipped = ${skips.length}`)
  }
}

module.exports = Report
<file_sep>const _ = require('lodash')
const logger = require('../common/logger')
const { searchResourceBookings } = require('./ResourceBookingService')
const { partiallyUpdateWorkPeriod } = require('./WorkPeriodService')
const { Scopes } = require('../../app-constants')
const { getUserById, getMemberDetailsByHandle } = require('../common/helper')
const { getCollectorName, createCollector, createMessage, upsertContactInSurveyMonkey, addContactsToSurvey, sendSurveyAPI } = require('../common/surveyMonkey')
const resourceBookingCache = {}
// Build the `sentSurveyError` payload stored on a WorkPeriod when sending fails.
function buildSentSurveyError (e) {
  const errorCode = _.get(e, 'code')
  const errorMessage = _.get(e, 'message', e.toString())
  return { errorCode, errorMessage }
}
/**
* Scheduler process entrance
*/
async function sendSurveys () {
  // machine user with the scopes needed to search resource bookings
  // and update work periods
  const currentUser = {
    isMachine: true,
    scopes: [Scopes.ALL_WORK_PERIOD, Scopes.ALL_WORK_PERIOD_PAYMENT]
  }
  // select only bookings that opted into the weekly survey and have
  // completed-but-unsent work periods with no previous send error
  const criteria = {
    fields: 'workPeriods,userId,id,sendWeeklySurvey',
    sendWeeklySurvey: true,
    'workPeriods.paymentStatus': 'completed',
    'workPeriods.sentSurvey': false,
    'workPeriods.sentSurveyError': '',
    jobIds: [],
    page: 1
  }
  const options = {
    returnAll: true,
    returnFromDB: true
  }
  try {
    let resourceBookings = await searchResourceBookings(currentUser, criteria, options)
    resourceBookings = resourceBookings.result
    logger.info({ component: 'SurveyService', context: 'sendSurvey', message: 'load workPeriod successfully' })
    const workPeriods = _.flatten(_.map(resourceBookings, 'workPeriods'))
    const collectors = {}
    // for each WorkPeriod make sure to create a collector (one per week)
    // so several WorkPeriods for the same week would be included into one collector
    // and gather contacts (members) from each WorkPeriods
    for (const workPeriod of workPeriods) {
      try {
        const collectorName = getCollectorName(workPeriod.endDate)
        // create collector and message for each week if not yet
        if (!collectors[collectorName]) {
          const collectorId = await createCollector(collectorName)
          const messageId = await createMessage(collectorId)
          // create map
          collectors[collectorName] = {
            workPeriodIds: [],
            collectorId,
            messageId,
            contacts: []
          }
        }
        collectors[collectorName].workPeriodIds.push(workPeriod.id)
        const resourceBooking = _.find(resourceBookings, (r) => r.id === workPeriod.resourceBookingId)
        const userInfo = {}
        // resolve member contact details once per user (module-level cache)
        if (!resourceBookingCache[resourceBooking.userId]) {
          let user = await getUserById(resourceBooking.userId)
          // fall back to a member-api lookup when the user record has no email
          if (!user.email && user.handle) {
            user = await getMemberDetailsByHandle(user.handle)
          }
          if (user.email) {
            userInfo.email = user.email
            if (user.firstName) {
              userInfo.first_name = user.firstName
            }
            if (user.lastName) {
              userInfo.last_name = user.lastName
            }
            resourceBookingCache[resourceBooking.userId] = userInfo
          }
        }
        // NOTE(review): when no email could be resolved above, the cache entry
        // stays undefined and an undefined contact is pushed here — confirm
        // upsertContactInSurveyMonkey tolerates undefined entries.
        collectors[collectorName].contacts.push(resourceBookingCache[resourceBooking.userId])
      } catch (e) {
        // record the failure on the work period so it is not retried endlessly
        // (the search criteria above exclude periods with sentSurveyError set)
        try {
          await partiallyUpdateWorkPeriod(
            currentUser,
            workPeriod.id,
            { sentSurveyError: buildSentSurveyError(e) }
          )
        } catch (e) {
          logger.error({ component: 'SurveyService', context: 'sendSurvey', message: `Error updating survey as failed for Work Period "${workPeriod.id}": ` + e.message })
        }
      }
    }
    // add contacts
    for (const collectorName of _.keys(collectors)) {
      const collector = collectors[collectorName]
      collectors[collectorName].contacts = await upsertContactInSurveyMonkey(collector.contacts)
    }
    // send surveys
    for (const collectorName of _.keys(collectors)) {
      const collector = collectors[collectorName]
      if (collector.contacts.length) {
        try {
          await addContactsToSurvey(
            collector.collectorId,
            collector.messageId,
            collector.contacts
          )
          await sendSurveyAPI(collector.collectorId, collector.messageId)
          logger.debug({ component: 'SurveyService', context: 'sendSurvey', message: `Sent survey for collector "${collectorName}" details:` + JSON.stringify(collector) })
          // mark every included work period as sent
          for (const workPeriodId of collectors[collectorName].workPeriodIds) {
            try {
              await partiallyUpdateWorkPeriod(currentUser, workPeriodId, { sentSurvey: true })
            } catch (e) {
              logger.error({ component: 'SurveyService', context: 'sendSurvey', message: `Error updating survey as sent for Work Period "${workPeriodId}": ` + e.message })
            }
          }
        } catch (e) {
          // sending failed — record the error on every included work period
          for (const workPeriodId of collectors[collectorName].workPeriodIds) {
            try {
              await partiallyUpdateWorkPeriod(
                currentUser,
                workPeriodId,
                { sentSurveyError: buildSentSurveyError(e) }
              )
            } catch (e) {
              logger.error({ component: 'SurveyService', context: 'sendSurvey', message: `Error updating survey as failed for Work Period "${workPeriodId}": ` + e.message })
            }
          }
        }
      }
    }
    logger.info({ component: 'SurveyService', context: 'sendSurvey', message: 'Processing weekly surveys is completed' })
  } catch (e) {
    logger.error({ component: 'SurveyService', context: 'sendSurvey', message: 'Error sending surveys: ' + e.message })
  }
}
// Exposed scheduler entry point.
module.exports = {
  sendSurveys
}
<file_sep>const config = require('config')
/*
 * Add viewedByCustomer field to the JobCandidate model.
 */
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'viewed_by_customer',
{
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: false
})
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'viewed_by_customer')
}
}
<file_sep>const fs = require('fs')
const Joi = require('joi')
const config = require('config')
const path = require('path')
const _ = require('lodash')
const { Interviews, AggregatePaymentStatus, WorkPeriodPaymentStatus, WorkPeriodPaymentUpdateStatus, PaymentProcessingSwitch, WeeklySurveySwitch } = require('../app-constants')
const logger = require('./common/logger')
const allowedInterviewStatuses = _.values(Interviews.Status)
const allowedXAITemplate = _.keys(Interviews.XaiTemplate)
Joi.page = () => Joi.number().integer().min(1).default(1)
Joi.perPage = () => Joi.number().integer().min(1).default(20)
Joi.rateType = () => Joi.string().valid('hourly', 'daily', 'weekly', 'monthly', 'annual')
Joi.jobStatus = () => Joi.string().valid('sourcing', 'in-review', 'assigned', 'closed', 'cancelled')
Joi.resourceBookingStatus = () => Joi.string().valid('placed', 'closed', 'cancelled')
Joi.workload = () => Joi.string().valid('full-time', 'fractional')
Joi.jobCandidateStatus = () => Joi.string().valid('open', 'placed', 'selected', 'client rejected - screening', 'client rejected - interview', 'rejected - other', 'cancelled', 'interview', 'topcoder-rejected', 'applied', 'rejected-pre-screen', 'skills-test', 'skills-test', 'phone-screen', 'job-closed', 'offered', 'withdrawn', 'withdrawn-prescreen')
Joi.title = () => Joi.string().max(128)
Joi.paymentStatus = () => Joi.string().valid(..._.values(AggregatePaymentStatus))
Joi.xaiTemplate = () => Joi.string().valid(...allowedXAITemplate)
Joi.interviewStatus = () => Joi.string().valid(...allowedInterviewStatuses)
Joi.workPeriodPaymentStatus = () => Joi.string().valid(..._.values(WorkPeriodPaymentStatus))
Joi.workPeriodPaymentUpdateStatus = () => Joi.string().valid(..._.values(WorkPeriodPaymentUpdateStatus))
// Empty string is not allowed by Joi by default and must be enabled with allow('').
// See https://joi.dev/api/?v=17.3.0#string fro details why it's like this.
// In many cases we would like to allow empty string to make it easier to create UI for editing data.
Joi.stringAllowEmpty = () => Joi.string().allow('')
Joi.smallint = () => Joi.number().integer().min(-32768).max(32767)
/**
 * Recursively scan `dir` and register every `.js` file found as a service.
 *
 * Uses synchronous fs calls so that all services are registered before this
 * function returns — the previous async `fs.stat` callbacks completed after
 * bootstrap had already returned, and silently swallowed stat errors.
 * Any fs error now fails fast at startup, which is the desired behavior
 * for a bootstrap step.
 *
 * @param {String} dir the directory to scan for service modules
 */
function buildServices (dir) {
  const files = fs.readdirSync(dir)
  files.forEach((file) => {
    const curPath = path.join(dir, file)
    const stats = fs.statSync(curPath)
    if (stats.isDirectory()) {
      // recurse into nested service directories
      buildServices(curPath)
    } else if (path.extname(file) === '.js') {
      const serviceName = path.basename(file, '.js')
      logger.buildService(require(curPath), serviceName)
    }
  })
}
buildServices(path.join(__dirname, 'services'))
// validate some configurable parameters for the app
const paymentProcessingSwitchSchema = Joi.string().label('PAYMENT_PROCESSING_SWITCH').valid(
...Object.values(PaymentProcessingSwitch)
)
const weeklySurveySwitchSchema = Joi.string().label('WEEKLY_SURVEY_SWITCH').valid(
...Object.values(WeeklySurveySwitch)
)
try {
Joi.attempt(config.PAYMENT_PROCESSING.SWITCH, paymentProcessingSwitchSchema)
Joi.attempt(config.WEEKLY_SURVEY.SWITCH, weeklySurveySwitchSchema)
} catch (err) {
console.error(err.message)
process.exit(1)
}
<file_sep>/**
* Contains workPeriod routes
*/
const constants = require('../../app-constants')
// Route definitions mapping work-period endpoints to controller methods.
module.exports = {
  '/work-periods': {
    // GET /work-periods - search work periods
    get: {
      controller: 'WorkPeriodController',
      method: 'searchWorkPeriods',
      auth: 'jwt',
      scopes: [constants.Scopes.READ_WORK_PERIOD, constants.Scopes.ALL_WORK_PERIOD]
    }
  },
  '/work-periods/:id': {
    // GET /work-periods/:id - get a single work period
    get: {
      controller: 'WorkPeriodController',
      method: 'getWorkPeriod',
      auth: 'jwt',
      scopes: [constants.Scopes.READ_WORK_PERIOD, constants.Scopes.ALL_WORK_PERIOD]
    },
    // PATCH /work-periods/:id - partially update a work period
    patch: {
      controller: 'WorkPeriodController',
      method: 'partiallyUpdateWorkPeriod',
      auth: 'jwt',
      scopes: [constants.Scopes.UPDATE_WORK_PERIOD, constants.Scopes.ALL_WORK_PERIOD]
    }
  }
}
<file_sep>'use strict';
const config = require('config')
/**
* Migrate JobCandidate status - from selected to placed.
*/
module.exports = {
up: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'placed' WHERE status = 'selected'`)
},
down: async (queryInterface, Sequelize) => {
const tableName = `${config.DB_SCHEMA_NAME}.job_candidates`
await queryInterface.sequelize.query(`UPDATE ${tableName} SET status = 'selected' WHERE status = 'placed'`)
}
};
<file_sep>const config = require('config')
const ResourceBooking = require('../src/models').ResourceBooking
const _ = require('lodash')
const helper = require('../src/common/helper')
const { v4: uuid } = require('uuid')
// maximum start date of resource bookings when populating work periods from existing resource bookings in migration script
const MAX_START_DATE = process.env.MAX_START_DATE || '2100-12-31'
// maximum end date of resource bookings when populating work periods from existing resource bookings in migration script
const MAX_END_DATE = process.env.MAX_END_DATE || '2100-12-31'
/*
* Populate WorkPeriods for ResourceBookings
*/
module.exports = {
  up: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    const Op = Sequelize.Op
    try {
      // only bookings whose start/end fall before the configured cut-off
      // dates are migrated (both comparisons are strict less-than)
      const resourceBookings = await ResourceBooking.findAll({
        where: {
          start_date: { [Op.lt]: new Date(MAX_START_DATE) },
          end_date: { [Op.lt]: new Date(MAX_END_DATE) }
        }
      })
      if (resourceBookings.length === 0) {
        await transaction.rollback()
        return
      }
      const workPeriodData = []
      await Promise.all(resourceBookings.map(async rb => {
        // bookings without both dates cannot be split into periods — skip
        if (!_.isNil(rb.startDate) && !_.isNil(rb.endDate)) {
          // split the booking's date range into work periods
          const periods = helper.extractWorkPeriods(rb.startDate, rb.endDate)
          const user = await helper.ensureUserById(rb.userId)
          _.forEach(periods, period => {
            workPeriodData.push({
              id: uuid(),
              resource_booking_id: rb.id,
              project_id: rb.projectId,
              user_handle: user.handle,
              start_date: period.startDate,
              end_date: period.endDate,
              days_worked: null,
              payment_status: 'pending',
              created_by: config.m2m.M2M_AUDIT_USER_ID,
              created_at: new Date()
            })
          })
        }
      }))
      await queryInterface.bulkInsert({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, workPeriodData, { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  },
  down: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    try {
      const Op = Sequelize.Op
      // same date filter as `up`, so we delete exactly the periods it created
      const resourceBookings = await ResourceBooking.findAll({
        where: {
          start_date: { [Op.lt]: new Date(MAX_START_DATE) },
          end_date: { [Op.lt]: new Date(MAX_END_DATE) }
        },
        // include soft-deleted resourceBookings
        paranoid: false
      })
      const resourceBookingIds = _.map(resourceBookings, rb => rb.dataValues.id)
      await queryInterface.bulkDelete({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME },
        { resource_booking_id: { [Op.in]: resourceBookingIds } }, { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  }
}
<file_sep>/**
* Controller for health check endpoint
*/
const models = require('../models')
const config = require('config')
const logger = require('../common/logger')
// the topcoder-healthcheck-dropin library returns checksRun count,
// here it follows that to return such count
let checksRun = 0
/**
* Check health of the DB
* @param {Object} req the request
* @param {Object} res the response
*/
/**
 * Check health of the DB
 * @param {Object} req the request
 * @param {Object} res the response
 */
async function checkHealth (req, res) {
  checksRun += 1
  const conn = new models.Sequelize(config.get('DATABASE_URL'), {
    logging: false
  })
  try {
    await conn.authenticate()
    logger.info({ component: 'HealthCheckController', context: 'checkHealth', message: 'Connection has been established successfully.' })
  } catch (err) {
    logger.logFullError(err, { component: 'HealthCheckController', context: 'checkHealth' })
    res.status(503)
  }
  await conn.close()
  res.send({ checksRun })
}
// Exposed health check endpoint handler.
module.exports = {
  checkHealth
}
<file_sep>/*
* Handle events for Interview.
*/
const { Op } = require('sequelize')
const _ = require('lodash')
const config = require('config')
const models = require('../models')
const logger = require('../common/logger')
const helper = require('../common/helper')
const teamService = require('../services/TeamService')
const Constants = require('../../app-constants')
/**
* Once we request Interview for a JobCandidate, the invitation emails to be sent out.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function sendInvitationEmail (payload) {
  const interview = payload.value
  // get customer details via job candidate user
  // NOTE(review): `Model.findById` was removed in Sequelize v5 in favor of
  // `findByPk` — confirm the models layer still provides `findById`,
  // otherwise this call throws at runtime.
  const jobCandidate = await models.JobCandidate.findById(interview.jobCandidateId)
  const job = await jobCandidate.getJob()
  // email the host plus all guests with the interview details
  teamService.sendEmail({}, {
    template: 'interview-invitation',
    cc: [interview.hostEmail, ...interview.guestEmails],
    data: {
      interview_id: interview.id,
      interview_round: interview.round,
      // first guest is the interviewee; remaining guests are extra interviewers
      interviewee_name: interview.guestNames[0],
      interviewer_name: interview.hostName,
      xai_template: '/' + interview.templateUrl,
      additional_interviewers_name: (interview.guestNames.slice(1)).join(','),
      interview_length: interview.duration,
      job_name: job.title
    }
  })
}
/**
* Check if there is overlapping interview, if there is overlapping, then send notifications.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function checkOverlapping (payload) {
  const interview = payload.value
  // cancelled/completed interviews cannot conflict with anything
  if (_.includes([Constants.Interviews.Status.Cancelled, Constants.Interviews.Status.Completed], interview.status)) {
    return
  }
  // find all active interviews whose time range intersects this one:
  // starts inside it, ends inside it, or fully encloses it
  const overlappingInterview = await models.Interview.findAll({
    where: {
      [Op.and]: [{
        status: _.values(_.omit(Constants.Interviews.Status, 'Completed', 'Cancelled'))
      }, {
        [Op.or]: [{
          startTimestamp: {
            [Op.lt]: interview.endTimestamp,
            [Op.gte]: interview.startTimestamp
          }
        }, {
          endTimestamp: {
            [Op.lte]: interview.endTimestamp,
            [Op.gt]: interview.startTimestamp
          }
        }, {
          [Op.and]: [{
            startTimestamp: {
              [Op.lt]: interview.startTimestamp
            }
          }, {
            endTimestamp: {
              [Op.gt]: interview.endTimestamp
            }
          }]
        }]
      }]
    }
  })
  // the query matches the interview itself too, so an actual
  // overlap means more than one row was returned
  if (_.size(overlappingInterview) > 1) {
    const template = helper.getEmailTemplatesForKey('notificationEmailTemplates')['taas.notification.interviews-overlapping']
    const jobCandidates = await models.JobCandidate.findAll({ where: { id: _.map(overlappingInterview, 'jobCandidateId') } })
    const jobs = await models.Job.findAll({ where: { id: _.uniq(_.map(jobCandidates, 'jobId')) } })
    const interviews = []
    for (const oli of overlappingInterview) {
      const jobCandidate = _.find(jobCandidates, { id: oli.jobCandidateId })
      const job = _.find(jobs, { id: jobCandidate.jobId })
      const project = await helper.getProjectById({ isMachine: true }, job.projectId)
      const user = await helper.getUserById(jobCandidate.userId)
      interviews.push({
        teamName: project.name,
        teamURL: `${config.TAAS_APP_URL}/${project.id}`,
        jobTitle: job.title,
        jobURL: `${config.TAAS_APP_URL}/${project.id}/positions/${job.id}`,
        candidateUserHandle: user.handle,
        // format timestamps once here — consumers below use these values as-is
        startTime: helper.formatDateTimeEDT(oli.startTimestamp),
        endTime: helper.formatDateTimeEDT(oli.endTimestamp)
      })
    }
    const emailData = {
      serviceId: 'email',
      type: 'taas.notification.interviews-overlapping',
      details: {
        from: template.from,
        recipients: (template.recipients || []).map(email => ({ email })),
        data: {
          subject: template.subject,
          interviews,
          notificationType: {
            overlappingInterview: true
          },
          description: 'Overlapping Job Candidate Interviews'
        },
        sendgridTemplateId: template.sendgridTemplateId,
        version: 'v3'
      }
    }
    // build one Slack section block per overlapping interview
    const renderInterview = (iv) => (
      {
        type: 'section',
        text: {
          type: 'mrkdwn',
          text: [
            `*Team Name*: <${iv.teamURL}|${iv.teamName}>`,
            `*Job Title*: <${iv.jobURL}|${iv.jobTitle}>`,
            `*Job Candidate*: ${iv.candidateUserHandle}`,
            // iv.startTime/iv.endTime were already formatted above; the
            // previous code passed them through formatDateTimeEDT a second
            // time, mangling the displayed values in Slack
            `*Start Time*: ${iv.startTime}`,
            `*End Time*: ${iv.endTime}`
          ].join('\n')
        }
      }
    )
    const slackData = {
      serviceId: 'slack',
      type: 'taas.notification.interviews-overlapping',
      details: {
        channel: config.NOTIFICATION_SLACK_CHANNEL,
        text: template.subject,
        blocks: [
          {
            type: 'section',
            text: {
              type: 'mrkdwn',
              text: '*:last_quarter_moon: Overlapping Job Candidate Interviews*'
            }
          },
          ..._.map(interviews, renderInterview)
        ]
      }
    }
    await helper.postEvent(config.NOTIFICATIONS_CREATE_TOPIC, {
      notifications: [emailData, slackData]
    })
    logger.debug({
      component: 'InterviewEventHandler',
      context: 'checkOverlapping',
      message: `interviewIds: ${_.join(_.map(overlappingInterview, 'id'), ',')}`
    })
  }
}
/**
* Process interview request event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processRequest (payload) {
  // email the invitation first, then warn about any scheduling conflicts
  await sendInvitationEmail(payload)
  await checkOverlapping(payload)
}
/**
* Process interview update event.
*
* @param {Object} payload the event payload
* @returns {undefined}
*/
async function processUpdate (payload) {
  // an update may change status or timestamps, so re-run the overlap check
  await checkOverlapping(payload)
}
// Kafka event handlers for interview request/update events.
module.exports = {
  processRequest,
  processUpdate
}
<file_sep>Recruit CRM Data Import
===
# Configuration
Configuration file is at `./scripts/recruit-crm-job-import/config.js`.
# Usage
``` bash
node scripts/recruit-crm-job-import <pathname-to-a-csv-file>
```
By default the script creates jobs and resource bookings via `TC_API`.
# Example
Follow the README for Taas API to deploy Taas API locally and then point the script to the local API by running:
``` bash
export RCRM_IMPORT_CONFIG_NAMESAPCE=RCRM_IMPORT_
export RCRM_IMPORT_TAAS_API_URL=http://localhost:3000/api/v5
node scripts/recruit-crm-job-import scripts/recruit-crm-job-import/example_data.csv | tee /tmp/report.txt
```
The example output is:
``` bash
DEBUG: processing line #1 - {"directProjectId":"24568","projectId":"(dynamic load)","externalId":"","title":"taas-demo-job5","startDate":"10/26/2020","endDate":"01/29/2021","numPositions":"2","userHandle":"nkumartest","jobid":"(dynamic load)","customerRate":"20","memberRate":"10","_lnum":1}
WARN: #1 - externalId is missing
DEBUG: processed line #1
DEBUG: processing line #2 - {"directProjectId":"24568","projectId":"(dynamic load)","externalId":"0","title":"taas-demo-job5","startDate":"10/26/2020","endDate":"01/29/2021","numPositions":"2","userHandle":"not_found_handle","jobid":"(dynamic load)","customerRate":"20","memberRate":"10","_lnum":2}
ERROR: #2 - id: 51ce2216-0dee-4dcf-bf7d-79f862e8d63c job created; handle: not_found_handle user not found
DEBUG: processed line #2
DEBUG: processing line #3 - {"directProjectId":"24568","projectId":"(dynamic load)","externalId":"0","title":"taas-demo-job5","startDate":"10/26/2020","endDate":"01/29/2021","numPositions":"2","userHandle":"nkumartest","jobid":"(dynamic load)","customerRate":"20","memberRate":"10","_lnum":3}
DEBUG: userHandle: nkumartest userId: 57646ff9-1cd3-4d3c-88ba-eb09a395366c
INFO: #3 - id: 51ce2216-0dee-4dcf-bf7d-79f862e8d63c externalId: 0 job already exists; id: d49d2fbd-ba11-49dc-8eaa-5afafa7e993f resource booking created
DEBUG: processed line #3
DEBUG: processing line #4 - {"directProjectId":"24567","projectId":"(dynamic load)","externalId":"1212","title":"Dummy Description","startDate":"10/20/2020","endDate":"01/29/2021","numPositions":"2","userHandle":"pshah_manager","jobid":"(dynamic load)","customerRate":"150","memberRate":"100","_lnum":4}
DEBUG: userHandle: pshah_manager userId: a55fe1bc-1754-45fa-9adc-cf3d6d7c377a
INFO: #4 - id: e0267551-24fe-48b5-9605-719852901de2 job created; id: f6285f03-056d-446f-a69b-6d275a97d68a resource booking created
DEBUG: processed line #4
DEBUG: processing line #5 - {"directProjectId":"24566","projectId":"(dynamic load)","externalId":"23850272","title":"33fromzaps330","startDate":"02/21/2021","endDate":"03/15/2021","numPositions":"7","userHandle":"nkumar2","jobid":"(dynamic load)","customerRate":"50","memberRate":"30","_lnum":5}
DEBUG: userHandle: nkumar2 userId: 4b00d029-c87b-47b2-bfe2-0ab80d8b5774
INFO: #5 - id: cd94784c-432d-4c46-b860-04a89e7b1099 job created; id: 98604c13-c6f3-4203-b74f-db376e9f02e4 resource booking created
DEBUG: processed line #5
DEBUG: processing line #6 - {"directProjectId":"24565","projectId":"(dynamic load)","externalId":"23843365","title":"Designer","startDate":"02/24/2021","endDate":"03/30/2021","numPositions":"1","userHandle":"GunaK-TopCoder","jobid":"(dynamic load)","customerRate":"70","memberRate":"70","_lnum":6}
DEBUG: userHandle: GunaK-TopCoder userId: 2bba34d5-20e4-46d6-bfc1-05736b17afbb
INFO: #6 - id: 49883150-59c2-4e5b-b5c3-aaf6d11d0da2 job created; id: 5505b6b5-050c-421c-893f-b862b1a08092 resource booking created
DEBUG: processed line #6
DEBUG: processing line #7 - {"directProjectId":"24564","projectId":"(dynamic load)","externalId":"23836459","title":"demo-dev-19janV4","startDate":"01/20/2021","endDate":"01/30/2021","numPositions":"1","userHandle":"nkumar1","jobid":"(dynamic load)","customerRate":"400","memberRate":"200","_lnum":7}
DEBUG: userHandle: nkumar1 userId: ab19a53b-0607-4a99-8bdd-f3b0cb552293
INFO: #7 - id: b03dc641-d6be-4a15-9c86-ef38f0e20c28 job created; id: 8e332107-453b-4ec5-b934-902c829e73a2 resource booking created
DEBUG: processed line #7
INFO: === summary ===
INFO: total: 7
INFO: success: 5
INFO: failure: 1
INFO: skips: 1
INFO: jobs created: 5
INFO: resource bookings created: 5
INFO: jobs already exist: 1
INFO: resource bookings already exist: 0
INFO: validation errors: 0
INFO: user not found: 1
INFO: external id missing: 1
INFO: request error: 0
INFO: internal error: 0
INFO: === summary ===
INFO: done!
```
To list all skipped lines:
``` bash
cat /tmp/report.txt | grep 'WARN'
```
To find out whether there are some users not found by user handles, run the following command:
``` bash
cat /tmp/report.txt | grep 'ERROR' | grep 'user not found'
```
<file_sep>require('../../src/bootstrap')
const logger = require('../../src/common/logger')
const paymentService = require('../../src/services/PaymentService')
const options = [
{
name: 'Test joi validation for projectId-1',
content: {
userHandle: 'pshah_manager',
amount: 3,
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Test joi validation for projectId-2',
content: {
projectId: 'project',
userHandle: 'pshah_manager',
amount: 3,
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Test joi validation for userHandle',
content: {
projectId: 17234,
amount: 3,
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Test joi validation for amount-1',
content: {
projectId: 17234,
userHandle: 'pshah_manager',
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Test joi validation for amount-2',
content: {
projectId: 17234,
userHandle: 'pshah_manager',
amount: -10,
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Successful payment creation',
content: {
projectId: 17234,
userHandle: 'pshah_manager',
amount: 3,
billingAccountId: 80000069,
name: 'test payment for pshah_manager',
description: '## test payment'
}
},
{
name: 'Successful payment creation without name and description',
content: {
projectId: 17234,
userHandle: 'pshah_customer',
amount: 2,
billingAccountId: 80000069
}
},
{
name: 'Failing payment creation with no active billing account',
content: {
projectId: 16839,
userHandle: 'pshah_customer',
amount: 2,
billingAccountId: 80000069,
name: 'test payment for pshah_customer',
description: '## test payment'
}
},
{
name: 'Failing payment creation with non existing user',
content: {
projectId: 17234,
userHandle: 'eisbilir',
amount: 2,
billingAccountId: 80000069
}
}
]
// Run every demo payment scenario sequentially, logging success or failure.
const test = async () => {
  for (const option of options) {
    logger.info({ component: 'demo-payment', context: 'test', message: `Starting to create payment for: ${option.name}` })
    try {
      await paymentService.createPayment(option.content)
      logger.info({ component: 'demo-payment', context: 'test', message: `Payment successfuly created for: ${option.name}` })
    } catch (err) {
      logger.error({ component: 'demo-payment', context: 'test', message: `Payment can't be created for: ${option.name}` })
    }
  }
}
// wait for bootstrap to complete it's job.
setTimeout(test, 2000)
<file_sep>'use strict';
const config = require('config')
const _ = require('lodash')
const { Interviews } = require('../app-constants')
// allowed status values
const statuses = _.values(Interviews.Status)
/**
* Create `interviews` table & relations.
*/
module.exports = {
  up: async (queryInterface, Sequelize) => {
    const table = { schema: config.DB_SCHEMA_NAME, tableName: 'interviews' }
    await queryInterface.createTable(table, {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      // FK to the job candidate being interviewed
      jobCandidateId: {
        field: 'job_candidate_id',
        type: Sequelize.UUID,
        allowNull: false,
        references: {
          model: {
            tableName: 'job_candidates',
            schema: config.DB_SCHEMA_NAME
          },
          key: 'id'
        }
      },
      googleCalendarId: {
        field: 'google_calendar_id',
        type: Sequelize.STRING(255)
      },
      customMessage: {
        field: 'custom_message',
        type: Sequelize.TEXT
      },
      xaiTemplate: {
        field: 'xai_template',
        type: Sequelize.STRING(255),
        allowNull: false
      },
      round: {
        type: Sequelize.INTEGER,
        allowNull: false
      },
      startTimestamp: {
        field: 'start_timestamp',
        type: Sequelize.DATE
      },
      attendeesList: {
        field: 'attendees_list',
        type: Sequelize.ARRAY(Sequelize.STRING)
      },
      // allowed values come from Interviews.Status (see `statuses` above)
      status: {
        type: Sequelize.ENUM(statuses),
        allowNull: false
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      // soft-delete marker
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    }, { schema: config.DB_SCHEMA_NAME })
  },
  down: async (queryInterface, Sequelize) => {
    const table = { schema: config.DB_SCHEMA_NAME, tableName: 'interviews' }
    const statusTypeName = `${table.schema}.enum_${table.tableName}_status`
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.dropTable(table, { transaction })
      // drop enum type for status column
      // (Postgres does not drop the enum type automatically with the table)
      await queryInterface.sequelize.query(`DROP TYPE ${statusTypeName}`, { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  }
};
<file_sep># Render Email Notification Template with some data
This script can render SendGrid Email Template (handlebars) `data/notifications-email-template.html` using some data from `data/notifications-email-demo-data.json` into `out/notifications-email-template-with-data.html`.
## Usage
Please run
```
node scripts/notification-email-template-renderer <notificationId>
```
where `<notificationId>` can be one of the keys in `data/notifications-email-demo-data.json` i.e:
- `candidatesAvailableForReview`
- `interviewComingUpForHost`
- `interviewComingUpForGuest`
- `interviewCompleted`
- `postInterviewCandidateAction`
- `upcomingResourceBookingExpiration`
The resulting file would be placed into `out/notifications-email-template-with-data.html`<file_sep>/*
* surveymonkey api
*
*/
const logger = require('./logger')
const config = require('config')
const _ = require('lodash')
const request = require('superagent')
const moment = require('moment')
const { encodeQueryString } = require('./helper')
/**
* This code uses several environment variables
*
* WEEKLY_SURVEY_SURVEY_CONTACT_GROUP_ID - the ID of contacts list which would be used to store all the contacts,
* see https://developer.surveymonkey.com/api/v3/#contact_lists-id
* WEEKLY_SURVEY_SURVEY_MASTER_COLLECTOR_ID - the ID of master collector - this collector should be created manually,
* and all other collectors would be created by copying this master collector.
* This is needed so we can make some config inside master collector which would
* be applied to all collectors.
* WEEKLY_SURVEY_SURVEY_MASTER_MESSAGE_ID - the ID of master message - similar to collector, this message would be created manually
* and then script would create copies of this message to use the same config.
*/
/**
 * Module-scoped logger: every entry is tagged with this module's component name.
 */
const forLevel = (level) => (message, context) => logger[level]({ component: 'SurveyMonkeyAPI', context, message })
const localLogger = {
  debug: forLevel('debug'),
  error: forLevel('error'),
  info: forLevel('info')
}
/**
 * Build a human readable note about how many SurveyMonkey API requests
 * have been used today, based on the rate-limit response headers.
 *
 * @param {Object} [response] the superagent response; may be missing, e.g. when
 *   a request failed with a network error/timeout and `e.response` is undefined
 * @returns {String} message like `today has sent N requests`
 */
function getRemainingRequestCountMessage (response) {
  // Guard: callers pass `e.response` from catch blocks, which is undefined on
  // network-level failures - previously this threw a TypeError and masked the
  // original error.
  const header = response && response.header
  if (!header) {
    return 'request count is unknown'
  }
  return `today has sent ${header['x-ratelimit-app-global-day-limit'] - header['x-ratelimit-app-global-day-remaining']} requests`
}
/**
 * Copy the SurveyMonkey API error details onto the thrown error object so the
 * caller sees the HTTP status code and the API's own error message.
 *
 * @param {Error} e the error thrown by superagent
 * @returns {Error} the same error, with `code` and `message` enriched
 */
function enrichErrorMessage (e) {
  const errorPath = 'response.body.error'
  e.code = _.get(e, `${errorPath}.http_status_code`)
  e.message = _.get(e, `${errorPath}.message`, e.toString())
  return e
}
/**
 * Extract the id of the only item in a result list.
 *
 * @param {Array<Object>} lst the result list; each item has an `id`
 * @param {String} errorMessage error message used when more than one item is found
 * @returns {String|null} the single item's id, or null for an empty list
 * @throws {Error} when the list contains more than one item
 */
function getSingleItem (lst, errorMessage) {
  switch (lst.length) {
    case 0:
      return null
    case 1:
      return lst[0].id
    default:
      throw new Error(errorMessage)
  }
}
/*
 * get collector name
 *
 * format: `<SURVEY_COLLECTOR_PREFIX> M/D/YYYY` (e.g. `Week Ending 6/4/2021`)
 */
function getCollectorName (dt) {
  const formattedDate = moment(dt).format('M/D/YYYY')
  return `${config.WEEKLY_SURVEY.SURVEY_COLLECTOR_PREFIX} ${formattedDate}`
}
/*
 * find a collector of the survey by its exact name
 * returns the collector id, or null when no collector matches
 */
async function searchCollector (collectorName) {
  const query = encodeQueryString({ name: collectorName })
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/surveys/${config.WEEKLY_SURVEY.SURVEY_ID}/collectors?${query}`
  try {
    const response = await request
      .get(url)
      .set({
        Authorization: `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
      })
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'searchCollector')
    return getSingleItem(response.body.data, 'More than 1 collector found by name ' + collectorName)
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'searchCollector')
    throw enrichedError
  }
}
/*
 * return the id of the collector with the given name,
 * creating it (as a copy of the master collector) when it doesn't exist yet
 */
async function createCollector (collectorName) {
  const existingId = await searchCollector(collectorName)
  if (existingId) {
    return existingId
  }
  const newId = await cloneCollector()
  await renameCollector(newId, collectorName)
  return newId
}
/*
 * create a new collector by copying the configured master collector
 * returns the new collector id
 */
async function cloneCollector () {
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/surveys/${config.WEEKLY_SURVEY.SURVEY_ID}/collectors`
  const body = { from_collector_id: `${config.WEEKLY_SURVEY.SURVEY_MASTER_COLLECTOR_ID}` }
  try {
    const response = await request
      .post(url)
      .set({
        Authorization: `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
      })
      .send(body)
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'cloneCollector')
    return response.body.id
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'cloneCollector')
    throw enrichedError
  }
}
/*
 * rename the given collector
 */
async function renameCollector (collectorId, name) {
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/collectors/${collectorId}`
  try {
    const response = await request
      .patch(url)
      .set({
        Authorization: `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
      })
      .send({ name })
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'renameCollector')
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'renameCollector')
    throw enrichedError
  }
}
/*
 * create an invite message for the collector by copying the master message
 * returns the new message id
 */
async function createMessage (collectorId) {
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/collectors/${collectorId}/messages`
  const body = {
    from_collector_id: `${config.WEEKLY_SURVEY.SURVEY_MASTER_COLLECTOR_ID}`,
    from_message_id: `${config.WEEKLY_SURVEY.SURVEY_MASTER_MESSAGE_ID}`
  }
  try {
    const response = await request
      .post(url)
      .set({
        Authorization: `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
      })
      .send(body)
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'createMessage')
    return response.body.id
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'createMessage')
    throw enrichedError
  }
}
/**
 * Add or update contacts in the configured SurveyMonkey contact list.
 * Contacts without an email address are skipped.
 *
 * @param {Array<Object>} list contacts to upsert (each may contain `email` and name fields)
 * @returns {Array<Object>} the already-existing and newly-created contacts (each with an `id`)
 */
async function upsertContactInSurveyMonkey (list) {
  list = _.filter(list, p => p.email)
  if (!list.length) {
    return []
  }
  const body = {
    contacts: list
  }
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/contact_lists/${config.WEEKLY_SURVEY.SURVEY_CONTACT_GROUP_ID}/contacts/bulk`
  try {
    const response = await request
      .post(url)
      .set('Authorization', `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`)
      .set('Content-Type', 'application/json')
      .set('Accept', 'application/json')
      .send(body)
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'upsertContactInSurveyMonkey')
    return _.concat(response.body.existing, response.body.succeeded)
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    // fixed: this error was previously logged under the wrong context ('createMessage')
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'upsertContactInSurveyMonkey')
    throw enrichedError
  }
}
/**
 * Register the given contacts as recipients of the collector message.
 *
 * @param {String} collectorId the collector id
 * @param {String} messageId the message id
 * @param {Array<Object>} contactIds objects each carrying a contact `id`
 * @returns {String} id from the API response
 */
async function addContactsToSurvey (collectorId, messageId, contactIds) {
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/collectors/${collectorId}/messages/${messageId}/recipients/bulk`
  const body = { contact_ids: _.map(contactIds, 'id') }
  try {
    const response = await request
      .post(url)
      .set({
        Authorization: `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`,
        'Content-Type': 'application/json',
        Accept: 'application/json'
      })
      .send(body)
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'addContactsToSurvey')
    return response.body.id
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'addContactsToSurvey')
    throw enrichedError
  }
}
/**
 * Trigger sending of the collector message to its recipients.
 *
 * @param {String} collectorId the collector id
 * @param {String} messageId the message id
 * @returns {String} id from the API response
 */
async function sendSurveyAPI (collectorId, messageId) {
  const url = `${config.WEEKLY_SURVEY.BASE_URL}/collectors/${collectorId}/messages/${messageId}/send`
  try {
    const response = await request
      .post(url)
      .set('Authorization', `Bearer ${config.WEEKLY_SURVEY.JWT_TOKEN}`)
      .set('Content-Type', 'application/json')
      .set('Accept', 'application/json')
      .send({})
    localLogger.info(`URL ${url}, ${getRemainingRequestCountMessage(response)}`, 'sendSurveyAPI')
    return response.body.id
  } catch (e) {
    const enrichedError = enrichErrorMessage(e)
    // fixed: added the missing `ERROR` marker so failures here are logged
    // consistently with every other function in this module
    localLogger.error(`URL ${url} ERROR ${enrichedError}, ${getRemainingRequestCountMessage(e.response)}`, 'sendSurveyAPI')
    throw enrichedError
  }
}
// public API of the SurveyMonkey helper
module.exports = {
  getCollectorName,
  createCollector,
  createMessage,
  upsertContactInSurveyMonkey,
  addContactsToSurvey,
  sendSurveyAPI
}
<file_sep>/**
* Controller for TaaS teams endpoints
*/
const HttpStatus = require('http-status-codes')
const service = require('../services/TeamService')
const helper = require('../common/helper')
/**
 * Search teams
 * @param req the request
 * @param res the response
 */
async function searchTeams (req, res) {
  const searchResult = await service.searchTeams(req.authUser, req.query)
  helper.setResHeaders(req, res, searchResult)
  res.send(searchResult.result)
}

/**
 * Get team
 * @param req the request
 * @param res the response
 */
async function getTeam (req, res) {
  const team = await service.getTeam(req.authUser, req.params.id)
  res.send(team)
}

/**
 * Get a single job belonging to a team
 * @param req the request
 * @param res the response
 */
async function getTeamJob (req, res) {
  const { id, jobId } = req.params
  const job = await service.getTeamJob(req.authUser, id, jobId)
  res.send(job)
}

/**
 * Send email through a particular template
 * @param req the request
 * @param res the response
 */
async function sendEmail (req, res) {
  await service.sendEmail(req.authUser, req.body)
  res.status(HttpStatus.NO_CONTENT).end()
}

/**
 * Add members to a team.
 * @param req the request
 * @param res the response
 */
async function addMembers (req, res) {
  const addedMembers = await service.addMembers(req.authUser, req.params.id, req.query, req.body)
  res.send(addedMembers)
}
/**
 * Search members in a team.
 * @param req the request
 * @param res the response
 */
async function searchMembers (req, res) {
  const { result } = await service.searchMembers(req.authUser, req.params.id, req.query)
  res.send(result)
}

/**
 * Search member invites for a team.
 * @param req the request
 * @param res the response
 */
async function searchInvites (req, res) {
  const { result } = await service.searchInvites(req.authUser, req.params.id, req.query)
  res.send(result)
}

/**
 * Remove a member from a team.
 * @param req the request
 * @param res the response
 */
async function deleteMember (req, res) {
  const { id, projectMemberId } = req.params
  await service.deleteMember(req.authUser, id, projectMemberId)
  res.status(HttpStatus.NO_CONTENT).end()
}

/**
 * Return details about the current user.
 * @param req the request
 * @param res the response
 */
async function getMe (req, res) {
  const currentUser = await service.getMe(req.authUser)
  res.send(currentUser)
}

/**
 * Return skills extracted from a job description.
 * @param req the request
 * @param res the response
 */
async function getSkillsByJobDescription (req, res) {
  const skills = await service.getSkillsByJobDescription(req.body)
  res.send(skills)
}
/**
 * Create a role search request.
 * @param req the request
 * @param res the response
 */
async function roleSearchRequest (req, res) {
  const searchOutcome = await service.roleSearchRequest(req.authUser, req.body)
  res.send(searchOutcome)
}

/**
 * Create a team.
 * @param req the request
 * @param res the response
 */
async function createTeam (req, res) {
  const team = await service.createTeam(req.authUser, req.body)
  res.send(team)
}

/**
 * Search skills
 * @param req the request
 * @param res the response
 */
async function searchSkills (req, res) {
  const searchResult = await service.searchSkills(req.query)
  helper.setResHeaders(req, res, searchResult)
  res.send(searchResult.result)
}

/**
 * Suggest members matching the given fragment.
 * @param req the request
 * @param res the response
 */
async function suggestMembers (req, res) {
  const suggestions = await service.suggestMembers(req.authUser, req.params.fragment)
  res.send(suggestions)
}

/**
 * Calculate the amount for a payment.
 * @param req the request
 * @param res the response
 */
async function calculateAmount (req, res) {
  const amount = await service.calculateAmount(req.body)
  res.send(amount)
}

/**
 * Create a payment for the given total amount.
 * @param req the request
 * @param res the response
 */
async function createPayment (req, res) {
  const payment = await service.createPayment(req.body.totalAmount)
  res.send(payment)
}

/**
 * Check whether the given member is external.
 * @param req the request
 * @param res the response
 */
async function isExternalMember (req, res) {
  const isExternal = await service.isExternalMember(req.body.memberId)
  res.send(isExternal)
}
// public HTTP handlers exposed by this controller
module.exports = {
  searchTeams,
  getTeam,
  getTeamJob,
  sendEmail,
  addMembers,
  searchMembers,
  searchInvites,
  deleteMember,
  getMe,
  getSkillsByJobDescription,
  roleSearchRequest,
  createTeam,
  searchSkills,
  suggestMembers,
  createPayment,
  calculateAmount,
  isExternalMember
}
<file_sep>/**
* Reindex ResourceBookings data in Elasticsearch using data from database
*/
const config = require('config')
const { WorkPeriod, WorkPeriodPayment } = require('../../src/models')
const logger = require('../../src/common/logger')
const helper = require('../../src/common/helper')
// optional CLI argument: when present, only this ResourceBooking is reindexed
const resourceBookingId = helper.getParamFromCliArgs()
// target Elasticsearch index
const index = config.get('esConfig.ES_INDEX_RESOURCE_BOOKING')
const reIndexAllResourceBookingsPrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the index ${index}`
const reIndexResourceBookingPrompt = `WARNING: this would remove existent data! Are you sure you want to reindex the document with id ${resourceBookingId} in index ${index}?`
// query options: load each ResourceBooking together with its nested work
// periods and their payments so the indexed documents are complete
const resourceBookingModelOpts = {
  modelName: 'ResourceBooking',
  include: [{
    model: WorkPeriod,
    as: 'workPeriods',
    include: [{
      model: WorkPeriodPayment,
      as: 'payments'
    }]
  }]
}
/**
 * Reindex ResourceBooking data from the database into Elasticsearch.
 *
 * When a resource booking id was passed on the command line only that document
 * is reindexed; otherwise the whole index is rebuilt. In both cases the user
 * is prompted for confirmation first, and the process exits with 0 on success
 * or 1 on failure.
 */
async function reIndexResourceBookings () {
  // the two branches previously duplicated the whole try/catch body;
  // consolidated into a single flow that only differs in prompt and indexing call
  const reindexAll = resourceBookingId === null
  const prompt = reindexAll ? reIndexAllResourceBookingsPrompt : reIndexResourceBookingPrompt
  await helper.promptUser(prompt, async () => {
    try {
      if (reindexAll) {
        await helper.indexBulkDataToES(resourceBookingModelOpts, index, logger)
      } else {
        await helper.indexDataToEsById(resourceBookingId, resourceBookingModelOpts, index, logger)
      }
      process.exit(0)
    } catch (err) {
      logger.logFullError(err, { component: 'reIndexResourceBookings' })
      process.exit(1)
    }
  })
}

reIndexResourceBookings()
<file_sep>const { Sequelize, Model } = require('sequelize')
const config = require('config')
const _ = require('lodash')
const { Interviews } = require('../../app-constants')
const errors = require('../common/errors')
// allowed interview status values, taken from the app-wide Interviews constants
const statuses = _.values(Interviews.Status)
/**
 * Sequelize model factory for the `interviews` table.
 * @param {Object} sequelize the sequelize instance
 * @returns {Interview} the Interview model class
 */
module.exports = (sequelize) => {
  class Interview extends Model {
    /**
     * Create association between models
     * @param {Object} models the database models
     */
    static associate (models) {
      Interview.belongsTo(models.JobCandidate, { foreignKey: 'jobCandidateId' })
    }

    /**
     * Get interview by id
     * @param {String} id the interview id
     * @returns {Interview} the Interview instance
     * @throws {NotFoundError} when no interview exists with the given id
     */
    static async findById (id) {
      const interview = await Interview.findOne({
        where: {
          id
        }
      })
      if (!interview) {
        throw new errors.NotFoundError(`id: ${id} "Interview" doesn't exist.`)
      }
      return interview
    }
  }
  Interview.init(
    {
      id: {
        type: Sequelize.UUID,
        primaryKey: true,
        allowNull: false,
        defaultValue: Sequelize.UUIDV4
      },
      // id of the interview in the external scheduling service (XAI) - TODO confirm
      xaiId: {
        field: 'xai_id',
        type: Sequelize.STRING(255)
      },
      jobCandidateId: {
        field: 'job_candidate_id',
        type: Sequelize.UUID,
        allowNull: false
      },
      calendarEventId: {
        field: 'calendar_event_id',
        type: Sequelize.STRING(255)
      },
      templateUrl: {
        field: 'template_url',
        type: Sequelize.STRING(255),
        allowNull: false
      },
      templateId: {
        field: 'template_id',
        type: Sequelize.STRING(255)
      },
      templateType: {
        field: 'template_type',
        type: Sequelize.STRING(255)
      },
      title: {
        field: 'title',
        type: Sequelize.STRING(255)
      },
      locationDetails: {
        field: 'location_details',
        type: Sequelize.STRING(255)
      },
      // interview duration - presumably minutes; confirm against scheduling service
      duration: {
        field: 'duration',
        type: Sequelize.INTEGER
      },
      // interview round number for the job candidate
      round: {
        type: Sequelize.INTEGER,
        allowNull: false
      },
      startTimestamp: {
        field: 'start_timestamp',
        type: Sequelize.DATE
      },
      endTimestamp: {
        field: 'end_timestamp',
        type: Sequelize.DATE
      },
      hostName: {
        field: 'host_name',
        type: Sequelize.STRING(255)
      },
      hostEmail: {
        field: 'host_email',
        type: Sequelize.STRING(255)
      },
      guestNames: {
        field: 'guest_names',
        type: Sequelize.ARRAY(Sequelize.STRING)
      },
      guestEmails: {
        field: 'guest_emails',
        type: Sequelize.ARRAY(Sequelize.STRING)
      },
      // restricted to the values in `statuses` (app-wide Interviews.Status)
      status: {
        type: Sequelize.ENUM(statuses),
        allowNull: false
      },
      rescheduleUrl: {
        field: 'reschedule_url',
        type: Sequelize.STRING(255)
      },
      createdBy: {
        field: 'created_by',
        type: Sequelize.UUID,
        allowNull: false
      },
      updatedBy: {
        field: 'updated_by',
        type: Sequelize.UUID
      },
      createdAt: {
        field: 'created_at',
        type: Sequelize.DATE
      },
      updatedAt: {
        field: 'updated_at',
        type: Sequelize.DATE
      },
      deletedAt: {
        field: 'deleted_at',
        type: Sequelize.DATE
      }
    },
    {
      schema: config.DB_SCHEMA_NAME,
      sequelize,
      tableName: 'interviews',
      // NOTE(review): paranoid is false although a deletedAt column is mapped,
      // so rows are hard-deleted despite the soft-delete column - confirm intended
      paranoid: false,
      deletedAt: 'deletedAt',
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      timestamps: true
    }
  )
  return Interview
}
<file_sep>/*
* The entry of event handlers.
*/
const config = require('config')
const eventDispatcher = require('../common/eventDispatcher')
const JobEventHandler = require('./JobEventHandler')
const JobCandidateEventHandler = require('./JobCandidateEventHandler')
const ResourceBookingEventHandler = require('./ResourceBookingEventHandler')
const InterviewEventHandler = require('./InterviewEventHandler')
const RoleEventHandler = require('./RoleEventHandler')
const WorkPeriodPaymentEventHandler = require('./WorkPeriodPaymentEventHandler')
const TeamEventHandler = require('./TeamEventHandler')
const logger = require('../common/logger')
// maps each Kafka topic to the handler function that processes its payload;
// topics missing from this map are ignored by `handleEvent`
const TopicOperationMapping = {
  [config.TAAS_JOB_CREATE_TOPIC]: JobEventHandler.processCreate,
  [config.TAAS_JOB_UPDATE_TOPIC]: JobEventHandler.processUpdate,
  [config.TAAS_JOB_CANDIDATE_CREATE_TOPIC]: JobCandidateEventHandler.processCreate,
  [config.TAAS_JOB_CANDIDATE_UPDATE_TOPIC]: JobCandidateEventHandler.processUpdate,
  [config.TAAS_RESOURCE_BOOKING_CREATE_TOPIC]: ResourceBookingEventHandler.processCreate,
  [config.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC]: ResourceBookingEventHandler.processUpdate,
  [config.TAAS_RESOURCE_BOOKING_DELETE_TOPIC]: ResourceBookingEventHandler.processDelete,
  [config.TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC]: WorkPeriodPaymentEventHandler.processCreate,
  [config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC]: WorkPeriodPaymentEventHandler.processUpdate,
  [config.TAAS_INTERVIEW_REQUEST_TOPIC]: InterviewEventHandler.processRequest,
  [config.TAAS_INTERVIEW_UPDATE_TOPIC]: InterviewEventHandler.processUpdate,
  [config.TAAS_ROLE_DELETE_TOPIC]: RoleEventHandler.processDelete,
  [config.TAAS_TEAM_CREATE_TOPIC]: TeamEventHandler.processCreate
}
/**
 * Handle event.
 *
 * Looks up the handler registered for the topic and runs it; unknown topics
 * are logged and ignored, handler failures are logged and rethrown.
 *
 * @param {String} topic the topic name
 * @param {Object} payload the message payload
 * @returns {undefined}
 */
async function handleEvent (topic, payload) {
  const handler = TopicOperationMapping[topic]
  if (!handler) {
    logger.debug({ component: 'eventHanders', context: 'handleEvent', message: `not interested event - topic: ${topic}` })
    return
  }
  logger.info({ component: 'eventHanders', context: 'handleEvent', message: `event handling - topic: ${topic}` })
  logger.debug({ component: 'eventHanders', context: 'handleEvent', message: `handling event - topic: ${topic} - payload: ${JSON.stringify(payload)}` })
  try {
    await handler(payload)
  } catch (err) {
    logger.error({ component: 'eventHanders', context: 'handleEvent', message: 'failed to handle event' })
    // rethrow so the caller can decide how to react to the failure
    throw err
  }
  logger.info({ component: 'eventHanders', context: 'handleEvent', message: `event successfully handled - topic: ${topic}` })
}
/**
* Attach the handlers to the event dispatcher.
*
* @returns {undefined}
*/
function init () {
eventDispatcher.register({
handleEvent
})
}
module.exports = {
init
}
<file_sep>const config = require('config')
/*
* Add externalId and resume fields to the JobCandidate model.
*/
module.exports = {
  // add the two new columns inside a single transaction so a failure
  // leaves the table unchanged
  up: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.addColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'external_id',
        { type: Sequelize.STRING(255) },
        { transaction })
      await queryInterface.addColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'resume',
        { type: Sequelize.STRING(2048) },
        { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  },
  // revert: drop the columns in reverse order of creation, also transactional
  down: async (queryInterface, Sequelize) => {
    const transaction = await queryInterface.sequelize.transaction()
    try {
      await queryInterface.removeColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'resume',
        { transaction })
      await queryInterface.removeColumn({ tableName: 'job_candidates', schema: config.DB_SCHEMA_NAME }, 'external_id',
        { transaction })
      await transaction.commit()
    } catch (err) {
      await transaction.rollback()
      throw err
    }
  }
}
<file_sep>'use strict';
const config = require('config')
/**
* Migrate work_period_payments challenge_id - from not null to allow null.
* enum_work_period_payments_status from completed, cancelled to completed, canceled, scheduled.
*/
module.exports = {
  // allow challenge_id to be NULL and add the new 'scheduled' enum value
  up: async (queryInterface, Sequelize) => {
    const table = { tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }
    await Promise.all([
      queryInterface.changeColumn(table, 'challenge_id', { type: Sequelize.UUID }),
      // NOTE(review): `ALTER TYPE ... ADD VALUE` cannot run inside a transaction
      // block on PostgreSQL < 12 - presumably this migration runs untransacted; verify
      queryInterface.sequelize.query(`ALTER TYPE ${config.DB_SCHEMA_NAME}.enum_work_period_payments_status ADD VALUE 'scheduled'`)
    ])
  },
  // revert: restore NOT NULL on challenge_id and remove the 'scheduled' enum
  // label by deleting it directly from the pg_enum system catalog
  down: async (queryInterface, Sequelize) => {
    const table = { tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }
    await Promise.all([
      queryInterface.changeColumn(table, 'challenge_id', { type: Sequelize.UUID, allowNull: false }),
      queryInterface.sequelize.query(`
      DELETE
      FROM
        pg_enum
      WHERE
        enumlabel = 'scheduled' AND
        enumtypid = (
          SELECT
            oid
          FROM
            pg_type
          WHERE
            typname = 'enum_work_period_payments_status'
        )
      `)
    ])
  }
};
<file_sep>/**
* Notification scheduler service - has the cron handlers for sending different types of notifications (email, web etc)
*/
const _ = require('lodash')
const { Op } = require('sequelize')
const moment = require('moment')
const config = require('config')
const models = require('../models')
const Job = models.Job
const JobCandidate = models.JobCandidate
const Interview = models.Interview
const ResourceBooking = models.ResourceBooking
const helper = require('../common/helper')
const constants = require('../../app-constants')
const logger = require('../common/logger')
// module-scoped logger that tags every entry with this service's component name
const localLogger = {
  debug: (message, context) => logger.debug({ component: 'NotificationSchedulerService', context, message }),
  error: (message, context) => logger.error({ component: 'NotificationSchedulerService', context, message }),
  info: (message, context) => logger.info({ component: 'NotificationSchedulerService', context, message })
}

// pre-loaded email templates used when dispatching notification emails
const emailTemplates = helper.getEmailTemplatesForKey('notificationEmailTemplates')
/**
 * Returns the project with the given id
 * @param projectId the project id
 * @returns the project, or null when it could not be fetched (the error is logged)
 */
async function getProjectWithId (projectId) {
  try {
    return await helper.getProjectById(helper.getAuditM2Muser(), projectId)
  } catch (err) {
    localLogger.error(
      `exception fetching project with id: ${projectId} Status Code: ${err.status} message: ${_.get(err, 'response.text', err.toString())}`, 'getProjectWithId')
    return null
  }
}
/**
 * extract the members of projects and build recipients list out of them
 * we can use `userId` to identify recipients
 * @param project the project
 * @returns {Object[]} array of unique recipients ({ userId })
 */
function buildProjectTeamRecipients (project) {
  const memberIds = _.map(project.members, member => _.pick(member, 'userId'))
  const recipients = _.unionBy(memberIds, 'userId')
  if (recipients.length === 0) {
    localLogger.error(`No recipients for projectId:${project.id}`, 'buildProjectTeamRecipients')
  }
  return recipients
}
/**
 * Gets the user with the given id
 * @param userId the user id
 * @returns the user, or null when the lookup failed (the error is logged)
 */
async function getUserWithId (userId) {
  try {
    return await helper.ensureUserById(userId)
  } catch (err) {
    localLogger.error(
      `exception fetching user with id: ${userId} Status Code: ${err.status} message: ${_.get(err, 'response.text', err.toString())}`, 'getUserWithId')
    return null
  }
}
/**
 * returns the data for the interview
 * @param interview the interview
 * @param jobCandidate optional jobCandidate corresponding to interview
 * @param job optional job corresponding to interview
 * @returns the interview details in the format used by clients, or null when
 *   the candidate's user could not be fetched
 */
async function getDataForInterview (interview, jobCandidate, job) {
  if (!jobCandidate) {
    jobCandidate = await JobCandidate.findById(interview.jobCandidateId)
  }
  if (!job) {
    job = await Job.findById(jobCandidate.jobId)
  }
  const user = await getUserWithId(jobCandidate.userId)
  if (!user) { return null }
  const jobBaseUrl = `${config.TAAS_APP_URL}/${job.projectId}/positions/${job.id}`
  return {
    jobTitle: job.title,
    // first guest name, when any guests were recorded
    guestFullName: _.isEmpty(interview.guestNames) ? '' : interview.guestNames[0],
    hostFullName: interview.hostName,
    candidateName: `${user.firstName} ${user.lastName}`,
    handle: user.handle,
    attendees: interview.guestNames,
    startTime: interview.startTimestamp ? helper.formatDateTimeEDT(interview.startTimestamp) : '',
    duration: interview.duration,
    interviewLink: `${jobBaseUrl}/candidates/interviews`,
    jobUrl: jobBaseUrl
  }
}
/**
 * Sends notifications to all the teams which have candidates available for review
 */
async function sendCandidatesAvailableNotifications () {
  localLogger.debug('[sendCandidatesAvailableNotifications]: Looking for due records...')
  // jobs that have at least one candidate awaiting review
  // NOTE(review): candidates are filtered by constants.JobStatus.OPEN -
  // presumably its value matches the JobCandidate "open" status; verify
  const jobsDao = await Job.findAll({
    include: [{
      model: JobCandidate,
      as: 'candidates',
      required: true,
      where: {
        status: constants.JobStatus.OPEN
      }
    }]
  })
  const jobs = _.map(jobsDao, dao => dao.dataValues)
  const projectIds = _.uniq(_.map(jobs, job => job.projectId))
  localLogger.debug(`[sendCandidatesAvailableNotifications]: Found ${projectIds.length} projects with Job Candidates awaiting for review.`)
  // for each unique project id, send an email
  let sentCount = 0
  for (const projectId of projectIds) {
    const project = await getProjectWithId(projectId)
    if (!project) { continue }
    const projectTeamRecipients = buildProjectTeamRecipients(project)
    const projectJobs = _.filter(jobs, job => job.projectId === projectId)
    const teamJobs = []
    for (const projectJob of projectJobs) {
      // get candidate list
      const jobCandidates = []
      for (const jobCandidate of projectJob.candidates) {
        const user = await getUserWithId(jobCandidate.userId)
        if (!user) { continue }
        jobCandidates.push({
          handle: user.handle,
          status: jobCandidate.status
        })
      }
      // review link
      const reviewLink = `${config.TAAS_APP_URL}/${projectId}/positions/${projectJob.id}/candidates/to-review`
      // get # of resource bookings
      const nResourceBookings = await ResourceBooking.count({
        where: {
          jobId: projectJob.id
        }
      })
      const jobUrl = `${config.TAAS_APP_URL}/${projectId}/positions/${projectJob.id}`
      teamJobs.push({
        title: projectJob.title,
        nResourceBookings,
        jobCandidates,
        reviewLink,
        jobUrl
      })
    }
    // NOTE(review): `sendNotification` is defined elsewhere in this module;
    // the call is not awaited (fire-and-forget) - confirm intended
    sendNotification({}, {
      template: 'taas.notification.candidates-available-for-review',
      recipients: projectTeamRecipients,
      data: {
        teamName: project.name,
        teamJobs,
        notificationType: {
          candidatesAvailableForReview: true
        },
        description: 'Candidates are available for review'
      }
    })
    sentCount++
  }
  localLogger.debug(`[sendCandidatesAvailableNotifications]: Sent notifications for ${sentCount} of ${projectIds.length} projects with Job Candidates awaiting for review.`)
}
/**
 * Sends reminders to the hosts and guests about their upcoming interview(s)
 */
async function sendInterviewComingUpNotifications () {
  localLogger.debug('[sendInterviewComingUpNotifications]: Looking for due records...')
  const currentTime = moment.utc().startOf('minute')
  // build one [rangeStart, rangeEnd) window per configured remind time;
  // interviews starting inside any of the windows get a reminder
  const timestampFilter = {
    [Op.or]: []
  }
  const window = moment.duration(config.INTERVIEW_COMING_UP_MATCH_WINDOW)
  for (const remindTime of config.INTERVIEW_COMING_UP_REMIND_TIME) {
    const rangeStart = currentTime.clone().add(moment.duration(remindTime))
    const rangeEnd = rangeStart.clone().add(window)
    timestampFilter[Op.or].push({
      [Op.and]: [
        {
          [Op.gte]: rangeStart
        },
        {
          [Op.lt]: rangeEnd
        }
      ]
    })
  }
  // only (re)scheduled interviews starting within one of the reminder windows
  const filter = {
    [Op.and]: [
      {
        status: {
          [Op.in]: [
            constants.Interviews.Status.Scheduled,
            constants.Interviews.Status.Rescheduled
          ]
        }
      },
      {
        startTimestamp: timestampFilter
      }
    ]
  }
  const interviews = await Interview.findAll({
    where: filter,
    raw: true
  })
  localLogger.debug(`[sendInterviewComingUpNotifications]: Found ${interviews.length} interviews which are coming soon.`)
  let sentHostCount = 0
  let sentGuestCount = 0
  for (const interview of interviews) {
    // send host email
    const data = await getDataForInterview(interview)
    if (!data) { continue }
    if (!_.isEmpty(interview.hostEmail)) {
      sendNotification({}, {
        template: 'taas.notification.interview-coming-up-host',
        recipients: [{ email: interview.hostEmail }],
        data: {
          ...data,
          notificationType: {
            interviewComingUpForHost: true
          },
          description: 'Interview Coming Up'
        }
      })
      sentHostCount++
    } else {
      localLogger.error(`Interview id: ${interview.id} host email not present`, 'sendInterviewComingUpNotifications')
    }
    if (!_.isEmpty(interview.guestEmails)) {
      // send guest emails
      sendNotification({}, {
        template: 'taas.notification.interview-coming-up-guest',
        recipients: interview.guestEmails.map((email) => ({ email })),
        data: {
          ...data,
          notificationType: {
            interviewComingUpForGuest: true
          },
          description: 'Interview Coming Up'
        }
      })
      sentGuestCount++
    } else {
      localLogger.error(`Interview id: ${interview.id} guest emails not present`, 'sendInterviewComingUpNotifications')
    }
  }
  localLogger.debug(`[sendInterviewComingUpNotifications]: Sent notifications for ${sentHostCount} hosts and ${sentGuestCount} guest of ${interviews.length} interviews which are coming soon.`)
}
/**
 * Sends reminder to the interview host after it ends to change the interview status
 */
async function sendInterviewCompletedNotifications () {
  localLogger.debug('[sendInterviewCompletedNotifications]: Looking for due records...')
  const window = moment.duration(config.INTERVIEW_COMPLETED_MATCH_WINDOW)
  // interviews that started INTERVIEW_COMPLETED_PAST_TIME ago, within the match window
  const rangeStart = moment.utc().startOf('minute').subtract(moment.duration(config.INTERVIEW_COMPLETED_PAST_TIME))
  const rangeEnd = rangeStart.clone().add(window)
  const filter = {
    [Op.and]: [
      {
        status: {
          [Op.in]: [
            constants.Interviews.Status.Scheduled,
            constants.Interviews.Status.Rescheduled,
            constants.Interviews.Status.Completed
          ]
        }
      },
      {
        startTimestamp: {
          [Op.and]: [
            {
              [Op.gte]: rangeStart
            },
            {
              [Op.lt]: rangeEnd
            }
          ]
        }
      }
    ]
  }
  let interviews = await Interview.findAll({
    where: filter,
    raw: true
  })
  // keep only the latest round per job candidate
  interviews = _.map(_.values(_.groupBy(interviews, 'jobCandidateId')), (interviews) => _.maxBy(interviews, 'round'))
  const jobCandidates = await JobCandidate.findAll({ where: { id: _.map(interviews, 'jobCandidateId') } })
  const jcMap = _.keyBy(jobCandidates, 'id')
  localLogger.debug(`[sendInterviewCompletedNotifications]: Found ${interviews.length} interviews which must be ended by now.`)
  let sentCount = 0
  for (const interview of interviews) {
    if (_.isEmpty(interview.hostEmail)) {
      localLogger.error(`Interview id: ${interview.id} host email not present`)
      continue
    }
    // only remind while the candidate is still in the INTERVIEW status
    if (!jcMap[interview.jobCandidateId] || jcMap[interview.jobCandidateId].status !== constants.JobCandidateStatus.INTERVIEW) {
      localLogger.error(`Interview id: ${interview.id} job candidate status is not ${constants.JobCandidateStatus.INTERVIEW}`)
      continue
    }
    const data = await getDataForInterview(interview, jcMap[interview.jobCandidateId])
    if (!data) { continue }
    sendNotification({}, {
      template: 'taas.notification.interview-awaits-resolution',
      recipients: [{ email: interview.hostEmail }],
      data: {
        ...data,
        notificationType: {
          interviewCompleted: true
        },
        description: 'Interview Completed'
      }
    })
    sentCount++
  }
  localLogger.debug(`[sendInterviewCompletedNotifications]: Sent notifications for ${sentCount} of ${interviews.length} interviews which must be ended by now.`)
}
/**
 * Sends reminder to the all members of teams which have interview completed to take action
 * to update the job candidate status
 */
async function sendPostInterviewActionNotifications () {
  localLogger.debug('[sendPostInterviewActionNotifications]: Looking for due records...')
  // candidates still in INTERVIEW status whose interviews started long enough ago
  const completedJobCandidates = await JobCandidate.findAll({
    where: {
      status: constants.JobCandidateStatus.INTERVIEW
    },
    include: [{
      model: Interview,
      as: 'interviews',
      required: true,
      where: {
        status: {
          [Op.in]: [
            constants.Interviews.Status.Scheduled,
            constants.Interviews.Status.Rescheduled,
            constants.Interviews.Status.Completed
          ]
        },
        startTimestamp: {
          [Op.lte]: moment.utc().subtract(moment.duration(config.POST_INTERVIEW_ACTION_MATCH_WINDOW))
        }
      }
    }]
  })
  // get all project ids for this job candidates
  const jobs = await Job.findAll({
    where: {
      id: {
        [Op.in]: completedJobCandidates.map(jc => jc.jobId)
      }
    },
    raw: true
  })
  const projectIds = _.uniq(_.map(jobs, job => job.projectId))
  localLogger.debug(`[sendPostInterviewActionNotifications]: Found ${projectIds.length} projects with ${completedJobCandidates.length} Job Candidates with interview completed awaiting for an action.`)
  let sentCount = 0
  const template = 'taas.notification.post-interview-action-required'
  for (const projectId of projectIds) {
    const project = await getProjectWithId(projectId)
    if (!project) { continue }
    const webNotifications = []
    const projectTeamRecipients = buildProjectTeamRecipients(project)
    const projectJobs = _.filter(jobs, job => job.projectId === projectId)
    const teamInterviews = []
    let numCandidates = 0
    for (const projectJob of projectJobs) {
      const projectJcs = _.filter(completedJobCandidates, jc => jc.jobId === projectJob.id)
      numCandidates += projectJcs.length
      for (const projectJc of projectJcs) {
        // only the candidate's latest interview round is reported
        const interview = _.maxBy(projectJc.interviews, 'round')
        const d = await getDataForInterview(interview, projectJc, projectJob)
        if (!d) { continue }
        d.jobUrl = `${config.TAAS_APP_URL}/${projectId}/positions/${projectJob.id}`
        // one web (in-app) notification per candidate, alongside the summary email
        webNotifications.push({
          serviceId: 'web',
          type: template,
          details: {
            recipients: projectTeamRecipients,
            contents: {
              jobTitle: d.jobTitle,
              teamName: project.name,
              projectId,
              jobId: projectJob.id,
              userHandle: d.handle
            },
            version: 1
          }
        })
        teamInterviews.push(d)
      }
    }
    sendNotification({}, {
      template,
      recipients: projectTeamRecipients,
      data: {
        teamName: project.name,
        numCandidates,
        teamInterviews,
        notificationType: {
          postInterviewCandidateAction: true
        },
        description: 'Post Interview Candidate Action Reminder'
      }
    }, webNotifications)
    sentCount++
  }
  localLogger.debug(`[sendPostInterviewActionNotifications]: Sent notifications for ${sentCount} of ${projectIds.length} projects with Job Candidates with interview completed awaiting for an action.`)
}
/**
* Sends reminders to all members of teams which have atleast one upcoming resource booking expiration
*/
async function sendResourceBookingExpirationNotifications () {
localLogger.debug('[sendResourceBookingExpirationNotifications]: Looking for due records...')
const currentTime = moment.utc()
const maxEndDate = currentTime.clone().add(moment.duration(config.RESOURCE_BOOKING_EXPIRY_TIME))
const expiringResourceBookings = await ResourceBooking.findAll({
where: {
endDate: {
[Op.and]: [
{
[Op.gt]: currentTime
},
{
[Op.lte]: maxEndDate
}
]
}
},
raw: true
})
const jobs = await Job.findAll({
where: {
id: {
[Op.in]: _.map(expiringResourceBookings, rb => rb.jobId)
}
},
raw: true
})
const projectIds = _.uniq(_.map(expiringResourceBookings, rb => rb.projectId))
localLogger.debug(`[sendResourceBookingExpirationNotifications]: Found ${projectIds.length} projects with ${expiringResourceBookings.length} Resource Bookings expiring in less than 3 weeks.`)
let sentCount = 0
const template = 'taas.notification.resource-booking-expiration'
for (const projectId of projectIds) {
const project = await getProjectWithId(projectId)
if (!project) { continue }
const projectTeamRecipients = buildProjectTeamRecipients(project)
const projectJobs = _.filter(jobs, job => job.projectId === projectId)
let numResourceBookings = 0
const teamResourceBookings = []
for (const projectJob of projectJobs) {
const resBookings = _.filter(expiringResourceBookings, rb => rb.jobId === projectJob.id)
numResourceBookings += resBookings.length
for (const booking of resBookings) {
const user = await getUserWithId(booking.userId)
if (!user) { continue }
const jobUrl = `${config.TAAS_APP_URL}/${projectId}/positions/${projectJob.id}`
const resourceBookingUrl = `${config.TAAS_APP_URL}/${projectId}/rb/${booking.id}`
teamResourceBookings.push({
jobTitle: projectJob.title,
handle: user.handle,
endDate: booking.endDate,
jobUrl,
resourceBookingUrl
})
}
}
const webData = {
serviceId: 'web',
type: template,
details: {
recipients: projectTeamRecipients,
contents: {
teamName: project.name,
projectId,
numOfExpiringResourceBookings: numResourceBookings
},
version: 1
}
}
const teamUrl = `${config.TAAS_APP_URL}/${project.id}`
sendNotification({}, {
template,
recipients: projectTeamRecipients,
data: {
teamName: project.name,
numResourceBookings,
teamResourceBookings,
notificationType: {
upcomingResourceBookingExpiration: true
},
teamUrl,
description: 'Upcoming Resource Booking Expiration'
}
}, [webData])
sentCount++
}
localLogger.debug(`[sendResourceBookingExpirationNotifications]: Sent notifications for ${sentCount} of ${projectIds.length} projects with Resource Bookings expiring in less than 3 weeks.`)
}
/**
* Send notification through a particular template
* @param {Object} currentUser the user who perform this operation
* @param {Object} data the email object
* @param {Array} webNotifications the optional list of web notifications
*/
async function sendNotification (currentUser, data, webNotifications = []) {
const template = emailTemplates[data.template]
const dataCC = data.cc || []
const templateCC = (template.cc || []).map(email => ({ email }))
const dataRecipients = data.recipients || []
const templateRecipients = (template.recipients || []).map(email => ({ email }))
const subjectBody = {
subject: data.subject || template.subject,
body: data.body || template.body
}
for (const key in subjectBody) {
subjectBody[key] = helper.substituteStringByObject(
subjectBody[key],
data.data
)
}
const recipients = _.uniq([...dataRecipients, ...templateRecipients])
const emailData = {
serviceId: 'email',
type: data.template,
details: {
from: data.from || template.from,
recipients,
cc: _.uniq([...dataCC, ...templateCC]),
data: { ...data.data, ...subjectBody },
sendgridTemplateId: template.sendgridTemplateId,
version: 'v3'
}
}
const notifications = [emailData, ...webNotifications]
await helper.postEvent(config.NOTIFICATIONS_CREATE_TOPIC, {
notifications
})
}
module.exports = {
sendNotification,
sendCandidatesAvailableNotifications,
sendInterviewComingUpNotifications,
sendInterviewCompletedNotifications,
sendPostInterviewActionNotifications,
sendResourceBookingExpirationNotifications
}
<file_sep>/**
* Controller for WorkPeriodPayment endpoints
*/
const service = require('../services/WorkPeriodPaymentService')
const helper = require('../common/helper')
/**
* Get workPeriodPayment by id
* @param req the request
* @param res the response
*/
async function getWorkPeriodPayment (req, res) {
res.send(await service.getWorkPeriodPayment(req.authUser, req.params.id, req.query.fromDb))
}
/**
* Create workPeriodPayment
* @param req the request
* @param res the response
*/
async function createWorkPeriodPayment (req, res) {
res.send(await service.createWorkPeriodPayment(req.authUser, req.body))
}
/**
* Create workPeriodPayments in bulk
* @param req the request
* @param res the response
*/
async function createBulkOfWorkPeriodPayments (req, res) {
res.send(await service.createBulkOfWorkPeriodPayments(req.authUser, req.body))
}
/**
* Update workPeriodPayments in bulk
* @param req the request
* @param res the response
*/
async function updateBulkOfWorkPeriodPayments (req, res) {
res.send(await service.updateBulkOfWorkPeriodPayments(req.authUser, req.body))
}
/**
* Partially update workPeriodPayment by id
* @param req the request
* @param res the response
*/
async function partiallyUpdateWorkPeriodPayment (req, res) {
res.send(await service.partiallyUpdateWorkPeriodPayment(req.authUser, req.params.id, req.body))
}
/**
* Search workPeriodPayments
* @param req the request
* @param res the response
*/
async function searchWorkPeriodPayments (req, res) {
const result = await service.searchWorkPeriodPayments(req.authUser, req.query)
helper.setResHeaders(req, res, result)
res.send(result.result)
}
/**
* Create all query workPeriodPayments
* @param req the request
* @param res the response
*/
async function createQueryWorkPeriodPayments (req, res) {
res.send(await service.createQueryWorkPeriodPayments(req.authUser, req.body))
}
module.exports = {
getWorkPeriodPayment,
createWorkPeriodPayment,
createBulkOfWorkPeriodPayments,
updateBulkOfWorkPeriodPayments,
createQueryWorkPeriodPayments,
partiallyUpdateWorkPeriodPayment,
searchWorkPeriodPayments
}
<file_sep>/*
* Constants for the RCRM import script.
*/
module.exports = {
ProcessingStatus: {
Successful: 'successful',
Failed: 'failed',
Skipped: 'skipped'
},
fieldNameMap: {
DirectprojectId: 'directProjectId',
externalId: 'externalId',
title: 'title',
startDate: 'startDate',
endDate: 'endDate',
numPositions: 'numPositions',
userHandle: 'userHandle',
customerRate: 'customerRate',
memberRate: 'memberRate'
}
}
<file_sep>const config = require('config')
const moment = require('moment')
module.exports = {
up: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
try {
await queryInterface.addColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'send_weekly_survey',
{ type: Sequelize.BOOLEAN, allowNull: false, defaultValue: true },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'sent_survey',
{ type: Sequelize.BOOLEAN, allowNull: false, defaultValue: false },
{ transaction })
await queryInterface.addColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'sent_survey_error',
{
type: Sequelize.JSONB({
errorCode: {
field: 'error_code',
type: Sequelize.INTEGER,
},
errorMessage: {
field: 'error_message',
type: Sequelize.STRING(255)
},
}), allowNull: true }, { transaction })
await queryInterface.sequelize.query(`UPDATE ${config.DB_SCHEMA_NAME}.work_periods SET sent_survey = true where payment_status = 'completed' and end_date <= '${moment().subtract(7, 'days').format('YYYY-MM-DD')}'`,
{ transaction })
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
},
down: async (queryInterface, Sequelize) => {
const transaction = await queryInterface.sequelize.transaction()
try {
await queryInterface.removeColumn({ tableName: 'resource_bookings', schema: config.DB_SCHEMA_NAME }, 'send_weekly_survey', { transaction })
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'sent_survey', { transaction })
await queryInterface.removeColumn({ tableName: 'work_periods', schema: config.DB_SCHEMA_NAME }, 'sent_survey_error', { transaction } )
await transaction.commit()
} catch (err) {
await transaction.rollback()
throw err
}
},
}
<file_sep>/*
* Logger for the RCRM sync script.
*/
const logger = require('../common/logger')
module.exports = logger
<file_sep>/* eslint-disable no-unused-expressions */
process.env.NODE_ENV = 'test'
require('../../src/bootstrap')
const sinon = require('sinon')
const rewire = require('rewire')
const expect = require('chai').expect
const helper = rewire('../../src/common/helper')
describe('helper test', () => {
before(() => {
})
beforeEach(() => {
sinon.restore()
})
describe('autoWrapExpress test', () => {
it('autoWrapExpress with sync function', () => {
const fn = () => { return null }
const res = helper.autoWrapExpress(fn)
expect(fn).to.eql(res)
})
it('autoWrapExpress with async function', () => {
const fn = async () => { return null }
const res = helper.autoWrapExpress(fn)
res()
expect(res).to.be.a('function')
})
it('autoWrapExpress with function array', () => {
const fn = [() => { return null }]
const res = helper.autoWrapExpress(fn)
expect(res).to.be.a('array')
})
it('autoWrapExpress with object', () => {
const obj = {
fn: () => { return null }
}
const res = helper.autoWrapExpress(obj)
expect(res).to.be.a('object')
expect(res.fn).to.be.a('function')
})
})
})
|
6d79e60155a3e0d4d73611c37f087c292d021379
|
[
"JavaScript",
"YAML",
"Markdown"
] | 89 |
JavaScript
|
deblasis/taas-apis
|
27eede9438952cc9ef8e386c79d4bd5922a6bdfa
|
8d9d76355a24247a8cf6cd15bcc359716f026b69
|
refs/heads/master
|
<file_sep>package deqo.mteu.MySimpleStack;
import java.lang.String;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.junit.Assert.*;
public class SimpleStackImplTest {
SimpleStack simpleStack;
Item item1 = new Item(new String("item1"));
Item item2 = new Item(new String("item2"));
Item item3 = new Item(new Integer(8));
@Before
public void setUp() throws Exception {
simpleStack = new SimpleStackImpl();
System.out.println("Je suis execute avant chaque test");
}
@Test
public void isEmpty() throws Exception {
assertThat(simpleStack.isEmpty(), is(true));
simpleStack.push(item1);
assertThat(simpleStack.isEmpty(), is(false));
}
@Test
public void getSize() throws Exception {
assertThat(simpleStack.getSize(), is(0));
simpleStack.push(item1);
assertThat(simpleStack.getSize(), is(1));
simpleStack.push(item2);
assertThat(simpleStack.getSize(), is(2));
}
@Test
public void push() throws Exception {
Assert.assertEquals(0,simpleStack.getSize());
simpleStack.push(item3);
Assert.assertEquals(1,simpleStack.getSize());
Item o = simpleStack.peek();
Assert.assertTrue(o.getValue() instanceof Integer);
Integer integer = (Integer) o.getValue();
Assert.assertEquals(8,integer.intValue());
Assert.assertEquals(item3.getValue(),integer);
}
@Test
public void peek() throws Exception {
Assert.assertEquals(0,simpleStack.getSize());
simpleStack.push(item3);
Assert.assertEquals(1,simpleStack.getSize());
Item o = simpleStack.peek();
Assert.assertEquals(1,simpleStack.getSize());
Assert.assertTrue(o.getValue() instanceof Integer);
Integer integer = (Integer) o.getValue();
Assert.assertEquals(8,integer.intValue());
Assert.assertEquals(item3.getValue(),integer);
}
@Test
public void pop() throws Exception {
Assert.assertEquals(0,simpleStack.getSize());
simpleStack.push(item3);
Assert.assertEquals(1,simpleStack.getSize());
Item o = simpleStack.pop();
Assert.assertEquals(0,simpleStack.getSize());
Assert.assertTrue(o.getValue() instanceof Integer);
Integer integer = (Integer) o.getValue();
Assert.assertEquals(8,integer.intValue());
Assert.assertEquals(item3.getValue(),integer);
}
}
|
8888a0631d931f5f7fb834ec81fa4474e3e94008
|
[
"Java"
] | 1 |
Java
|
malaikateuhi/my-simple-stack
|
0c6a36414302c1fb35cba4ecee769adf6b0a2ea9
|
7c3dcd9fcde27d984e381d8dce9b0922cce789d5
|
refs/heads/master
|
<repo_name>ppooiiuuyh/ARCNN-FAST<file_sep>/utils.py
"""
Scipy version > 0.18 is needed, due to 'mode' option from scipy.misc.imread function
"""
import matplotlib.pyplot as plt
import scipy.misc
import scipy.ndimage
import scipy.io
import math
import numpy as np
import sys
from imresize import *
import glob, os, re
import cv2
from tqdm import tqdm
import tensorflow as tf
def rgb2ycbcr(im):
'''
xform = np.array([[.299, .587, .114], [-.1687, -.3313, .5], [.5, -.4187, -.0813]])
ycbcr = im.dot(xform.T)
ycbcr[:, :, [1, 2]] += 128
return np.uint8(ycbcr)
'''
return cv2.cvtColor(im,cv2.COLOR_RGB2YCR_CB)[:,:,[0,2,1]]
def ycbcr2rgb(im):
'''
xform = np.array([[1, 0, 1.402], [1, -0.34414, -.71414], [1, 1.772, 0]])
rgb = im.astype(np.float)
rgb[:, :, [1, 2]] -= 128
rgb = rgb.dot(xform.T)
np.putmask(rgb, rgb > 255, 255)
np.putmask(rgb, rgb < 0, 0)
return np.uint8(rgb)
'''
temp = cv2.cvtColor(im[:,:,[0,2,1]], cv2.COLOR_YCR_CB2RGB)
return temp
def augumentation(img_sequence):
augmented_sequence = []
for img in img_sequence:
for _ in range(3):
rot_img = np.rot90(img)
augmented_sequence.append(rot_img)
flipped_img = np.fliplr(img)
for _ in range(4):
rot_flipped_img = np.rot90(flipped_img)
augmented_sequence.append(rot_flipped_img)
img_sequence.extend(augmented_sequence)
return img_sequence
def imsave(image, path):
# image = image - np.min(image)
# image = image / np.max(image)
# image = np.clip(image, 0, 1)
# return plt.imsave(path, image)
return scipy.misc.imsave(path, image) # why different?
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# inpur_setupt_eval. Using Matlab file (.m) upscaled with matlab bicubic
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def input_setup_eval(args, mode):
# ===========================================================
# [input setup] / split image
# ===========================================================
sub_input_sequence = []
sub_label_sequence = []
# ----------------------------------------------------------------
# [input setup] / split image - for trainset and testset
# ----------------------------------------------------------------
if mode == "train" :
inputs_, labels_= get_image("train/" + args.train_subdir, args = args)
for (input_,label_) in zip(inputs_,labels_):
h, w, _ = input_.shape # only for R,G,B image
for x in range(0, h - args.patch_size + 1, args.patch_size):
for y in range(0, w - args.patch_size + 1, args.patch_size):
sub_input = input_[x:x + args.patch_size, y:y + args.patch_size, :]
sub_label = label_[x:x + args.patch_size, y:y + args.patch_size, :]
sub_input_sequence.append(sub_input)
sub_label_sequence.append(sub_label)
return sub_input_sequence, sub_label_sequence
elif mode == "test":
nxy = []
sub_input_sequence, sub_label_sequence = get_image("test/"+args.test_subdir, args=args)
return sub_input_sequence, sub_label_sequence
def get_image(data_path, args):
scale = args.scale
l = glob.glob(os.path.join(data_path, "*"))
l = [f for f in l if re.search("^\d+.mat$", os.path.basename(f))]
img_list = []
for f in l:
if os.path.exists(f):
if os.path.exists(f[:-4] + "_2.mat"): img_list.append([f, f[:-4] + "_2.mat", 2])
if os.path.exists(f[:-4] + "_3.mat"): img_list.append([f, f[:-4] + "_3.mat", 3])
if os.path.exists(f[:-4] + "_4.mat"): img_list.append([f, f[:-4] + "_4.mat", 4])
input_list = []
gt_list = []
for pair in img_list:
mat_dict = scipy.io.loadmat(pair[1])
input_img = None
if "img_" + str(scale) in mat_dict:
input_img = mat_dict["img_" + str(scale)]
else:
continue
if (args.c_dim == 3):
input_img = np.stack([input_img, input_img, input_img], axis=-1)
else:
input_img = np.expand_dims(input_img, axis=-1)
input_list.append(input_img)
gt_img = scipy.io.loadmat(pair[0])['img_raw']
if (args.c_dim == 3):
gt_img = np.stack([gt_img, gt_img, gt_img], axis=-1)
else:
gt_img = gt_img.reshape(gt_img.shape[0], gt_img.shape[1], 1) # np.expand_dims(gt_img,axis=-1)
gt_list.append(gt_img)
return input_list, gt_list
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# input_setup for demo. For RGB
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def input_setup_demo(args, mode):
# ===========================================================
# [input setup] / split image
# ===========================================================
sub_input_sequence = []
sub_label_sequence = []
# ----------------------------------------------------------------
# [input setup] / split image - for trainset and testset
# ----------------------------------------------------------------
if mode == "train":
data = prepare_data( args=args, mode=mode)
for i in tqdm(range(len(data))):
input_, label_ = preprocess(data[i],args, centercrop=True) # normalized full-size image
h, w, _ = input_.shape
for x in range(0, h - args.patch_size + 1, args.stride_size):
for y in range(0, w - args.patch_size + 1, args.stride_size):
sub_input = input_[x:x + args.patch_size, y:y + args.patch_size, :]
sub_label = label_[x:x + args.patch_size, y:y + args.patch_size, :]
sub_input_sequence.append(sub_input)
sub_label_sequence.append(sub_label)
return sub_input_sequence, sub_label_sequence
elif mode == "test":
data = prepare_data(args=args, mode=mode)
for i in range(len(data)):
input_, label_ = preprocess(data[i], args) # normalized full-size image
sub_input_sequence.append(input_)
sub_label_sequence.append(label_)
return sub_input_sequence, sub_label_sequence
def prepare_data(args, mode):
if mode == "train":
data_dir = os.path.join(os.getcwd(),"dataset", mode, args.train_subdir)
data = glob.glob(os.path.join(data_dir, "*"))
elif mode == "test":
data_dir = os.path.join(os.getcwd(), "dataset",mode, args.test_subdir)
data = glob.glob(os.path.join(data_dir, "*"))
return data
def preprocess(path, args, centercrop = False):
image = plt.imread(path)
if len(image.shape) < 3:
image = np.stack([image] * 3, axis=-1)
image = rgb2ycbcr(image)
if np.max(image) > 1: image = (image / 255).astype(np.float32)
if centercrop : image_croped = image[image.shape[0]//2-90:image.shape[0]//2+90,image.shape[1]//2-90:image.shape[1]//2+90]
else : image_croped = modcrop(image, args.scale)
if args.mode == "train" or args.mode == "test":
label_ = image_croped
input_ = imresize(image_croped, args.scale,output_shape=None)
input_ = imresize(input_, 1/args.scale,output_shape=None)
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), args.jpgqfactor]
result, encimg = cv2.imencode('.jpg', (ycbcr2rgb(input_) * 255)[..., ::-1], encode_param)
img_ = cv2.imdecode(encimg, 1)[..., ::-1]
input_ = rgb2ycbcr((img_ / 255).astype(np.float32))
#input_ = cv2.resize(image_croped,None,fx=1 / args.scale, fy=1 / args.scale, interpolation = cv2.INTER_AREA)
#input_ = cv2.resize(input_,None,fx=args.scale, fy=args.scale, interpolation = cv2.INTER_CUBIC)
return input_ ,label_
def modcrop(image, scale=3):
"""
To scale down and up the original image, first thing to do is to have no remainder while scaling operation.
We need to find modulo of height (and width) and scale factor.
Then, subtract the modulo from height (and width) of original image size.
There would be no remainder even after scaling operation.
"""
if len(image.shape) == 3:
h, w, _ = image.shape
h = h - np.mod(h, scale)
w = w - np.mod(w, scale)
image = image[0:h, 0:w, :]
else:
h, w = image.shape
h = h - np.mod(h, scale)
w = w - np.mod(w, scale)
image = image[0:h, 0:w]
return image
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
'''
batch_labels_ds_co = []
for b in batch_labels:
b = ycbcr2rgb(self.com.inference(b))
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), self.args.jpgqfactor]
result, encimg = cv2.imencode('.jpg', (b*255)[...,::-1], encode_param)
img_ = cv2.imdecode(encimg, 1)[...,::-1]
img = rgb2ycbcr((img_/255).astype(np.float32))
batch_labels_ds_co.append(img)
batch_labels_ds_co = np.array(batch_labels_ds_co)
batch_labels_ds_co = np.expand_dims(np.array(batch_labels_ds_co)[:,:,:,0],-1)
batch_labels = np.expand_dims(np.array(batch_labels)[:,:,:,0],-1)
'''
<file_sep>/main.py
import argparse
import os
import pprint
import tensorflow as tf
from model_arcnn_fast import ARCNN_FAST
import matplotlib.pyplot as plt
import numpy as np
from imresize import *
if __name__ == '__main__':
# =======================================================
# [global variables]
# =======================================================
pp = pprint.PrettyPrinter()
args = None
DATA_PATH = "./train/"
TEST_DATA_PATH = "./data/test/"
# =======================================================
# [add parser]
# =======================================================
parser = argparse.ArgumentParser()
#===================== common configuration ============================================
parser.add_argument("--exp_tag", type=str, default="ARCNN tensorflow. Implemented by <NAME>")
parser.add_argument("--gpu", type=int, default=0) # -1 for CPU
parser.add_argument("--epoch", type=int, default=80)
parser.add_argument("--batch_size", type=int, default=128)
parser.add_argument("--patch_size", type=int, default=24)
parser.add_argument("--stride_size", type=int, default=20)
parser.add_argument("--deconv_stride", type = int, default = 2)
parser.add_argument("--scale", type=int, default=1)
parser.add_argument("--jpgqfactor", type= int, default =60)
parser.add_argument("--train_subdir", default="BSD400")
parser.add_argument("--test_subdir", default="Set5")
parser.add_argument("--infer_imgpath", default="monarch.bmp") # monarch.bmp
parser.add_argument("--type", default="YCbCr", choices=["RGB","Gray","YCbCr"])#YCbCr type uses images preprocessesd by matlab
parser.add_argument("--c_dim", type=int, default=3) # 3 for RGB, 1 for Y chaanel of YCbCr (but not implemented yet)
parser.add_argument("--mode", default="train", choices=["train", "test", "inference", "test_plot"])
parser.add_argument("--base_lr", type=float, default=1e-5)
parser.add_argument("--min_lr", type=float, default=1e-6)
parser.add_argument("--lr_decay_rate", type=float, default=1e-1)
parser.add_argument("--lr_step_size", type=int, default=20) # 9999 for no decay
parser.add_argument("--checkpoint_dir", default="checkpoint")
parser.add_argument("--cpkt_itr", default=0) # -1 for latest, set 0 for training from scratch
parser.add_argument("--save_period", type=int, default=1)
parser.add_argument("--result_dir", default="result")
parser.add_argument("--save_extension", default=".jpg", choices=["jpg", "png"])
print("=====================================================================")
args = parser.parse_args()
if args.type == "YCbCr":
args.c_dim = 1; #args.train_subdir += "_M"; args.test_subdir += "_M"
elif args.type == "RGB":
args.c_dim = 3;
elif args.type == "Gray":
args.c_dim = 1
print("Eaxperiment tag : " + args.exp_tag)
pp.pprint(args)
print("=====================================================================")
# =======================================================
# [make directory]
# =======================================================
if not os.path.exists(os.path.join(os.getcwd(), args.checkpoint_dir)):
os.makedirs(os.path.join(os.getcwd(), args.checkpoint_dir))
if not os.path.exists(os.path.join(os.getcwd(), args.result_dir)):
os.makedirs(os.path.join(os.getcwd(), args.result_dir))
# =======================================================
# [Main]
# =======================================================
# -----------------------------------
# system configuration
# -----------------------------------
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = str(args.gpu)
config = tf.ConfigProto()
if args.gpu == -1: config.device_count = {'GPU': 0}
config.gpu_options.per_process_gpu_memory_fraction = 0.3
# config.operation_timeout_in_ms=10000
# -----------------------------------
# build model
# -----------------------------------
with tf.Session(config = config) as sess:
arcnn = ARCNN_FAST(sess = sess, args = args)
# -----------------------------------
# train, test, inferecnce
# -----------------------------------
if args.mode == "train":
arcnn.train()
elif args.mode == "test":
arcnn.test()
elif args.mode == "inference":
pass
elif args.mode == "test_plot":
pass
<file_sep>/README.md
# [!This repository is on fixing. Please refer to branch]
# ARCNN-FAST tensorflow
[Deep Convolution Networks for Compression Artifacts Reduction] (https://arxiv.org/pdf/1608.02778.pdf)
#### ARCNN model architecture
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/ppooiiuuyh/ARCNN-FAST/master/asset/arcnnmodel.png" width="600">
</p>
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/ppooiiuuyh/ARCNN-FAST/master/asset/arcnnmodel2.png" width="600">
</p>
#### ARCNN result (original, jpg20, reconstructed by arcnn on jpg20)
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/ppooiiuuyh/ARCNN-FAST/master/asset/arcnnresult.png" width="600">
</p>
#### loss history
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/ppooiiuuyh/ARCNN-FAST/master/asset/losshist.png" width="600">
</p>
## Prerequisites
* python 3.x
* Tensorflow > 1.5
* matplotlib
* argparse
* opencv2
## Properties (what's different from reference code)
* This code requires Tensorflow. This code was fully implemented based on Python 3
* This implements supports color channels based on YCbCr.
* This implements reached PSRN score over paper baseline (on Y channel)
## Author
<NAME>
<file_sep>/metrics.py
import sys
import numpy as np
from scipy import signal
from scipy import ndimage
import math
#=======================================================================================
# metric
#=======================================================================================
def psnr(target, ref, max, scale):
# assume RGB image
scale += 6
target_data = np.array(target)
target_data = target_data[scale:-scale-1, scale:-scale-1]
ref_data = np.array(ref)
ref_data = ref_data[scale:-scale-1, scale:-scale-1]
diff = (ref_data - target_data) ** 2
diff = diff.flatten('C')
rmse = math.sqrt(np.mean(diff))
return 20 * math.log10(max / rmse)
#===========================================================================
# \structural similarity
#===========================================================================
def gaussian2(size, sigma):
"""Returns a normalized circularly symmetric 2D gauss kernel array
f(x,y) = A.e^{-(x^2/2*sigma^2 + y^2/2*sigma^2)} where
A = 1/(2*pi*sigma^2)
as define by Wolfram Mathworld
http://mathworld.wolfram.com/GaussianFunction.html
"""
A = 1 / (2.0 * np.pi * sigma ** 2)
x, y = np.mgrid[-size // 2 + 1:size // 2 + 1, -size // 2 + 1:size // 2 + 1]
g = A * np.exp(-((x ** 2 / (2.0 * sigma ** 2)) + (y ** 2 / (2.0 * sigma ** 2))))
return g
def fspecial_gauss(size, sigma):
"""Function to mimic the 'fspecial' gaussian MATLAB function
"""
x, y = np.mgrid[-size // 2 + 1:size // 2 + 1, -size // 2 + 1:size // 2 + 1]
g = np.exp(-((x ** 2 + y ** 2) / (2.0 * sigma ** 2)))
return g / g.sum()
# original implementation : https://github.com/mubeta06/python/blob/master/signal_processing/sp/ssim.py
def ssim(img1, img2, max=1, scale=0, cs_map=False):
"""Return the Structural Similarity Map corresponding to input images img1
and img2 (images are assumed to be uint8)
This function attempts to mimic precisely the functionality of ssim.m a
MATLAB provided by the author's of SSIM
https://ece.uwaterloo.ca/~z70wang/research/ssim/ssim_index.m
"""
scale+=6
img1 = img1.astype(np.float64)[scale:-scale-1,scale:-scale-1]
img2 = img2.astype(np.float64)[scale:-scale-1,scale:-scale-1]
size = 11
sigma = 1.5
window = fspecial_gauss(size, sigma)
#window = np.stack([window]*3, axis=-1)
K1 = 0.01
K2 = 0.03
L = max # bitdepth of image
C1 = (K1 * L) ** 2
C2 = (K2 * L) ** 2
mu1 = signal.fftconvolve(window, img1, mode='valid')
mu2 = signal.fftconvolve(window, img2, mode='valid')
mu1_sq = mu1 * mu1
mu2_sq = mu2 * mu2
mu1_mu2 = mu1 * mu2
sigma1_sq = signal.fftconvolve(window, img1 * img1, mode='valid') - mu1_sq
sigma2_sq = signal.fftconvolve(window, img2 * img2, mode='valid') - mu2_sq
sigma12 = signal.fftconvolve(window, img1 * img2, mode='valid') - mu1_mu2
if cs_map:
return (((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2)),
(2.0 * sigma12 + C2) / (sigma1_sq + sigma2_sq + C2))
else:
temp = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
(sigma1_sq + sigma2_sq + C2))
return np.mean(temp)
def msssim(img1, img2, max=1):
"""This function implements Multi-Scale Structural Similarity (MSSSIM) Image
Quality Assessment according to Z. Wang's "Multi-scale structural similarity
for image quality assessment" Invited Paper, IEEE Asilomar Conference on
Signals, Systems and Computers, Nov. 2003
Author's MATLAB implementation:-
http://www.cns.nyu.edu/~lcv/ssim/msssim.zip
"""
level = 5
weight = np.array([0.0448, 0.2856, 0.3001, 0.2363, 0.1333])
downsample_filter = np.ones((2, 2)) / 4.0
im1 = img1.astype(np.float64)
im2 = img2.astype(np.float64)
mssim = np.array([])
mcs = np.array([])
for l in range(level):
ssim_map, cs_map = ssim(im1, im2, max = max, cs_map=True)
mssim = np.append(mssim, ssim_map.mean())
mcs = np.append(mcs, cs_map.mean())
filtered_im1 = ndimage.filters.convolve(im1, downsample_filter,
mode='reflect')
filtered_im2 = ndimage.filters.convolve(im2, downsample_filter,
mode='reflect')
im1 = filtered_im1[::2, ::2]
im2 = filtered_im2[::2, ::2]
return (np.prod(mcs[0:level - 1] ** weight[0:level - 1]) *
(mssim[level - 1] ** weight[level - 1]))
<file_sep>/model_arcnn_fast.py
import os
import time
import tensorflow as tf
from utils import *
from imresize import *
from metrics import *
import matplotlib.pyplot as plt
import pprint
import math
import numpy as np
import sys
import glob
from tqdm import tqdm
class ARCNN_FAST(object):
    """ARCNN-fast compression-artifact-removal network (TF1 graph mode).

    On construction this loads train/test patches, builds the training and
    test graphs (sharing weights through a tf template), creates the
    optimizer/summary tensors, and restores a checkpoint when requested
    via ``args.cpkt_itr``.
    """

    # ==========================================================
    # class initializer
    # ==========================================================
    def __init__(self, sess, args):
        self.sess = sess
        self.args = args
        self.preprocess()
        self.model()
        self.other_tensors()
        self.init_model()

    # ==========================================================
    # preprocessing
    # ==========================================================
    def preprocess(self):
        """Load train/test data and apply scale + flip/rotation augmentation."""
        self.train_label = []
        self.train_input = []
        self.test_label = []
        self.test_input = []

        # NOTE(review): every branch currently selects the same setup
        # function; kept so the type switch can diverge later.
        if self.args.type == "YCbCr":
            input_setup = input_setup_demo
        elif self.args.type == "RGB":
            input_setup = input_setup_demo
        else:
            input_setup = input_setup_demo

        # scale augmentation: harvest patches at several downscale factors
        scale_temp = self.args.scale
        for s in [1, 0.9, 0.8, 0.7, 0.6]:
            self.args.scale = s
            train_input_, train_label_ = input_setup(self.args, mode="train")
            self.train_label.extend(train_label_)
            self.train_input.extend(train_input_)
        self.args.scale = scale_temp

        # augmentation (rotation, mirror flip)
        self.train_label = augumentation(self.train_label)
        self.train_input = augumentation(self.train_input)

        # setup test data
        self.test_input, self.test_label = input_setup(self.args, mode="test")

    # ==========================================================
    # build model
    # ==========================================================
    def model(self):
        """Create the fixed-size training graph and variable-size test graph."""
        with tf.variable_scope("ARCNN_FAST") as scope:
            # the template shares variables between the two graph instances
            shared_inner_model_template = tf.make_template('shared_model', self.inner_model)

            self.images = tf.placeholder(tf.float32, [None, self.args.patch_size, self.args.patch_size, self.args.c_dim], name='images')
            self.labels = tf.placeholder(tf.float32, [None, self.args.patch_size, self.args.patch_size, self.args.c_dim], name='labels')
            self.pred = shared_inner_model_template(self.images)

            self.image_test = tf.placeholder(tf.float32, [1, None, None, self.args.c_dim], name='image_test')
            self.label_test = tf.placeholder(tf.float32, [1, None, None, self.args.c_dim], name='labels_test')
            self.pred_test = shared_inner_model_template(self.image_test)

    # ===========================================================
    # inner model
    # ===========================================================
    def inner_model(self, inputs):
        """Feature extraction -> shrink/enhance -> mapping -> transposed-conv
        reconstruction, with a global residual connection from the input."""
        with tf.variable_scope("feature_extraction") as scope:
            conv_w = tf.get_variable("conv_w", [9, 9, self.args.c_dim, 64], initializer=tf.contrib.layers.xavier_initializer())
            conv_b = tf.get_variable("conv_b", [64], initializer=tf.constant_initializer(0))
            layer = tf.nn.bias_add(tf.nn.conv2d(inputs, conv_w, strides=[1, 1, 1, 1], padding='SAME'), conv_b)
            # single learnable negative slope (PReLU-like activation)
            alpha = tf.get_variable("prelu_alpha", [1], initializer=tf.constant_initializer(0.2))
            layer = tf.nn.leaky_relu(layer, alpha=alpha)

        with tf.variable_scope("feature_enhancement") as scope:
            # 1x1 channel shrink; the stride also downsamples spatially
            conv_w = tf.get_variable("conv_w_shrink", [1, 1, 64, 32], initializer=tf.contrib.layers.xavier_initializer())
            conv_b = tf.get_variable("conv_b_shrink", [32], initializer=tf.constant_initializer(0))
            layer = tf.nn.bias_add(tf.nn.conv2d(layer, conv_w, strides=[1, self.args.deconv_stride, self.args.deconv_stride, 1], padding='SAME'), conv_b)
            alpha = tf.get_variable("prelu_alpha", [1], initializer=tf.constant_initializer(0.2))
            layer = tf.nn.leaky_relu(layer, alpha=alpha)

            conv_w = tf.get_variable("conv_w_enhance", [7, 7, 32, 32], initializer=tf.contrib.layers.xavier_initializer())
            conv_b = tf.get_variable("conv_b_enhance", [32], initializer=tf.constant_initializer(0))
            layer = tf.nn.bias_add(tf.nn.conv2d(layer, conv_w, strides=[1, 1, 1, 1], padding='SAME'), conv_b)
            alpha = tf.get_variable("prelu_alpha2", [1], initializer=tf.constant_initializer(0.2))
            layer = tf.nn.leaky_relu(layer, alpha=alpha)

        with tf.variable_scope("mapping") as scope:
            conv_w = tf.get_variable("conv_w_", [1, 1, 32, 64], initializer=tf.contrib.layers.xavier_initializer())
            conv_b = tf.get_variable("conv_b", [64], initializer=tf.constant_initializer(0))
            layer = tf.nn.bias_add(tf.nn.conv2d(layer, conv_w, strides=[1, 1, 1, 1], padding='SAME'), conv_b)
            alpha = tf.get_variable("prelu_alpha", [1], initializer=tf.constant_initializer(0.2))
            layer = tf.nn.leaky_relu(layer, alpha=alpha)

        with tf.variable_scope("reconstruction") as scope:
            # conv2d_transpose filters are [h, w, out_ch, in_ch], so this
            # maps 64 channels back to c_dim while undoing the earlier stride.
            conv_w = tf.get_variable("conv_w", [9, 9, self.args.c_dim, 64], initializer=tf.contrib.layers.xavier_initializer())
            conv_b = tf.get_variable("conv_b", [self.args.c_dim], initializer=tf.constant_initializer(0))
            layer = tf.nn.bias_add(tf.nn.conv2d_transpose(layer, conv_w, strides=[1, self.args.deconv_stride, self.args.deconv_stride, 1], padding='SAME', output_shape=tf.shape(inputs)), conv_b)

        # global residual: the network learns the artifact correction only
        pred = layer + inputs
        return pred

    # ============================================================
    # other tensors related with training
    # ============================================================
    def other_tensors(self):
        """Loss, decayed learning rate, optimizer, and summary ops."""
        with tf.variable_scope("trainer") as scope:
            self.global_step = tf.Variable(0, trainable=False, name="global_step")
            self.loss = tf.reduce_mean(tf.square(self.pred - self.labels))  # L2 (MSE) loss
            # exponential decay in epoch-sized stairs, floored at min_lr
            self.learning_rate = tf.maximum(tf.train.exponential_decay(self.args.base_lr, self.global_step,
                                                                       len(self.train_label) // self.args.batch_size * self.args.lr_step_size,
                                                                       self.args.lr_decay_rate,
                                                                       staircase=True),
                                            self.args.min_lr)  # staircase showed better results
            self.train_op = tf.train.AdamOptimizer(self.learning_rate).minimize(self.loss, global_step=self.global_step)

            # tensorboard
            self.summary_writer = tf.summary.FileWriter("./board", self.sess.graph)
            self.loss_history = tf.summary.scalar("loss", self.loss)
            self.summary = tf.summary.merge_all()
            self.psnr_history = []
            self.ssim_history = []

    # ============================================================
    # init tensors
    # ============================================================
    def init_model(self):
        """Initialize all variables and optionally restore a checkpoint."""
        self.sess.run(tf.global_variables_initializer())
        self.saver = tf.train.Saver(max_to_keep=0)
        if self.cpkt_load(self.args.checkpoint_dir, self.args.cpkt_itr):
            print(" [*] Load SUCCESS")
        else:
            print(" [!] Load failed...")

    def cpkt_save(self, checkpoint_dir, step):
        """Save the session under <checkpoint_dir>/<args.type>/checks/."""
        model_name = "checks.model"
        model_dir = "checks"
        checkpoint_dir = os.path.join(checkpoint_dir, self.args.type, model_dir)
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)
        self.saver.save(self.sess,
                        os.path.join(checkpoint_dir, model_name),
                        global_step=step)

    def cpkt_load(self, checkpoint_dir, checkpoint_itr):
        """Restore a checkpoint.

        ``checkpoint_itr == 0`` trains from scratch, ``-1`` restores the most
        recent checkpoint, any other value restores that specific iteration.
        Returns True on success (or when training from scratch).
        """
        print(" [*] Reading checkpoints...")
        model_dir = "checks"
        checkpoint_dir = os.path.join(checkpoint_dir, self.args.type, model_dir)

        if checkpoint_itr == 0:
            print("train from scratch")
            return True
        elif checkpoint_itr == -1:
            # BUGFIX: was `checkpoint_dir == -1`, which compared the path
            # string to -1, so the "latest checkpoint" branch never ran.
            ckpt = tf.train.latest_checkpoint(checkpoint_dir)
        else:
            ckpt = os.path.join(checkpoint_dir, "checks.model-" + str(checkpoint_itr))
        print(ckpt)

        if ckpt:
            self.saver.restore(self.sess, ckpt)
            return True
        else:
            return False

    # ==========================================================
    # functions
    # ==========================================================
    def inference(self, input_img):
        """Run the test graph on the first (Y) channel of one image.

        Accepts HxW (grayscale) or HxWx3 (YCbCr) arrays in [0, 1] or
        [0, 255]; returns the image with its Y channel replaced by the
        network output (still YCbCr when 3-channel).
        """
        if (np.max(input_img) > 1): input_img = (input_img / 255).astype(np.float32)

        size = input_img.shape
        if (len(input_img.shape) == 3):
            infer_image_input = input_img[:, :, 0].reshape(1, size[0], size[1], 1)
        else:
            infer_image_input = input_img.reshape(1, size[0], size[1], 1)
        sr_img = self.sess.run(self.pred_test, feed_dict={self.image_test: infer_image_input})

        # NOTE(review): the resize only matches the network output size when
        # args.scale == 1 -- confirm behavior for other scales.
        input_img = imresize(input_img, self.args.scale)
        if (len(input_img.shape) == 3):
            input_img[:, :, 0] = sr_img[0, :, :, 0]
        else:
            input_img = sr_img[0]
        return input_img  # returned as ycbcr

    # ==========================================================
    # train
    # ==========================================================
    def train(self):
        """Main training loop: shuffle, step over batches, log and checkpoint."""
        self.test()
        print("Training...")
        start_time = time.time()
        for ep in range(self.args.epoch):
            # shuffle inputs and labels with the same seed so pairs stay aligned
            seed = int(time.time())
            np.random.seed(seed); np.random.shuffle(self.train_label)
            np.random.seed(seed); np.random.shuffle(self.train_input)

            # train on the Y channel only (channel 0 of each patch)
            batch_idxs = len(self.train_label) // self.args.batch_size
            for idx in tqdm(range(0, batch_idxs)):
                batch_labels = np.expand_dims(np.array(self.train_label[idx * self.args.batch_size: (idx + 1) * self.args.batch_size])[:, :, :, 0], -1)
                batch_inputs = np.expand_dims(np.array(self.train_input[idx * self.args.batch_size: (idx + 1) * self.args.batch_size])[:, :, :, 0], -1)
                feed = {self.images: batch_inputs, self.labels: batch_labels}
                _, err, lr, summary = self.sess.run([self.train_op, self.loss, self.learning_rate, self.summary], feed_dict=feed)
                self.summary_writer.add_summary(summary, self.global_step.eval())

            # print log and evaluate every epoch
            if ep % 1 == 0:
                print("Epoch: [%2d], step: [%2d], time: [%4.4f], loss_com: [%.8f], lr: [%.8f]" \
                      % ((ep + 1), self.global_step.eval(), time.time() - start_time, np.mean(err), lr))
                self.test()

            # save checkpoints periodically
            if ep % self.args.save_period == 0:
                self.cpkt_save(self.args.checkpoint_dir, ep + 1)

    # ==========================================================
    # test
    # ==========================================================
    def test(self):
        """Evaluate on the test set: write images, report mean PSNR/SSIM."""
        print("Testing...")
        psnrs_preds = []
        ssims_preds = []
        preds = []
        labels = []
        for idx in range(0, len(self.test_label)):
            test_label = np.array(self.test_label[idx])  # HxWx3 YCbCr
            test_input = np.array(self.test_input[idx])

            # dump reference JPEGs at several quality factors plus a PNG
            for f in [5, 10, 20, 40, 50, 60, 80, 100]:
                cv2.imwrite(os.path.join(self.args.result_dir, str(idx) + "ori" + str(f) + ".jpg"),
                            (ycbcr2rgb(test_label) * 255)[..., ::-1], [int(cv2.IMWRITE_JPEG_QUALITY), f])
            cv2.imwrite(os.path.join(self.args.result_dir, str(idx) + "ori.PNG"), (ycbcr2rgb(test_label) * 255)[..., ::-1])

            # run the network and save the restored image
            result = self.inference(test_input)
            cv2.imwrite(os.path.join(self.args.result_dir, str(idx) + "rec" + str(self.args.jpgqfactor) + ".bmp"), (ycbcr2rgb(result) * 255)[..., ::-1])
            preds.append(result)
            labels.append(test_label)

        # compute metrics on the Y channel only
        for i in range(len(self.test_label)):
            if len(np.array(labels[i]).shape) == 3: labels[i] = np.array(labels[i])[:, :, 0]
            if len(np.array(preds[i]).shape) == 3: preds[i] = np.array(preds[i])[:, :, 0]
            psnrs_preds.append(psnr(labels[i], preds[i], max=1.0, scale=self.args.scale))
            ssims_preds.append(ssim(labels[i], preds[i], max=1.0, scale=self.args.scale))

        # print evaluation results
        print("===================================================================================")
        print("PSNR: " + str(round(np.mean(np.clip(psnrs_preds, 0, 100)), 3)) + "dB")
        print("SSIM: " + str(round(np.mean(np.clip(ssims_preds, 0, 100)), 5)))
        print("===================================================================================")
        self.psnr_history.append(str(round(np.mean(np.clip(psnrs_preds, 0, 100)), 3)))
        self.ssim_history.append(str(round(np.mean(np.clip(ssims_preds, 0, 100)), 5)))
        print()
        for h in self.psnr_history:
            print(h, ",", end="")
        print()
        print()
        for h in self.ssim_history:
            print(h, ",", end="")
        print()
|
1db9e9e868f11cb9439d8038d23cf7a607295809
|
[
"Markdown",
"Python"
] | 5 |
Python
|
ppooiiuuyh/ARCNN-FAST
|
1631c40bb2d6d700dd915a9dcb41cdc207cf52b9
|
062e8b644247ddf5bc3d21d98269c0d3aacec558
|
refs/heads/master
|
<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
# plot2: total PM2.5 emissions per year for Baltimore City (fips == "24510").
baltimoreNEI <- subset(NEI,NEI$fips == "24510")
# Sum emissions within each year.
baltimoreyearsum <- tapply(baltimoreNEI$Emissions,baltimoreNEI$year,sum)
baltimoreemissionsbytime <- data.frame(years = names(baltimoreyearsum),emissions = baltimoreyearsum)
png(filename = "plot2.png")
with(baltimoreemissionsbytime,plot(years,emissions,type="n", main = "Total Baltimore Emissions"))
lines(baltimoreemissionsbytime$years,baltimoreemissionsbytime$emissions)
dev.off()<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
# plot1: total US PM2.5 emissions per year across all sources.
yearsum <- tapply(NEI$Emissions,NEI$year,sum)
temissionsbytime <- data.frame(years = names(yearsum),emissions = yearsum)
png(filename = "plot1.png")
with(temissionsbytime,plot(years,emissions,type="n",main = "Total US Emissions"))
lines(temissionsbytime$years,temissionsbytime$emissions)
dev.off()<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
library(dplyr)
library(ggplot2)
# plot5: Baltimore motor-vehicle emissions per year.
Merged <- merge(NEI,SCC,by.x = "SCC",by.y = "SCC")
# NOTE(review): the motor-vehicle sectors are selected by position [21:24],
# which is fragile -- confirm these indices still map to the mobile-vehicle
# levels of SCC$EI.Sector.
MotorVehicleNames <-unique(SCC$EI.Sector)[21:24]
MotorVehicle <- Merged[Merged$EI.Sector %in% MotorVehicleNames,]
MotorVehicleBalt <- subset(MotorVehicle,MotorVehicle$fips == "24510")
vehicleyearsum <- tapply(MotorVehicleBalt$Emissions,MotorVehicleBalt$year,sum)
baltvehicleemissionsbytime <- data.frame(years = names(vehicleyearsum),emissions = vehicleyearsum)
png(filename = "plot5.png")
with(baltvehicleemissionsbytime,plot(years,emissions,type="n",main = "Baltimore Motor Vehicle Emissions"))
lines(baltvehicleemissionsbytime$years,baltvehicleemissionsbytime$emissions)
dev.off()
<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
library(dplyr)
library(ggplot2)
# plot6: motor-vehicle emissions over time, Baltimore vs Los Angeles.
Merged <- merge(NEI,SCC,by.x = "SCC",by.y = "SCC")
# NOTE(review): vehicle sectors picked by position [21:24]; confirm these
# indices still correspond to the mobile-vehicle EI.Sector levels.
MotorVehicleNames <-unique(SCC$EI.Sector)[21:24]
MotorVehicle <- Merged[Merged$EI.Sector %in% MotorVehicleNames,]
# Baltimore City (fips 24510).
MotorVehicleBalt <- subset(MotorVehicle,MotorVehicle$fips == "24510")
BaltVehYearSum <- tapply(MotorVehicleBalt$Emissions,MotorVehicleBalt$year,sum)
BaltVehEmissions <- data.frame(years = names(BaltVehYearSum),emissions = BaltVehYearSum)
BaltVehEmissions$City <- "Baltimore"
# Los Angeles County (fips 06037).
MotorVehicleLA <- subset(MotorVehicle,MotorVehicle$fips == "06037")
LAVehYearSum <- tapply(MotorVehicleLA$Emissions,MotorVehicleLA$year,sum)
LAVehEmissions <- data.frame(years = names(LAVehYearSum),emissions = LAVehYearSum)
LAVehEmissions$City <- "Los Angeles"
# Stack the two cities so a single grouped ggplot can draw both lines.
BaltAndLA <- rbind(BaltVehEmissions,LAVehEmissions)
png(filename = "plot6.png")
ggplot(data=BaltAndLA,aes(x=years,y=emissions,color=City,group=City)) +
geom_point() +
geom_line() +
ggtitle("Motor Vehicle Emissions: LA vs Baltimore") +
theme(plot.title = element_text(hjust = 0.5))
dev.off()
<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
library(dplyr)
library(ggplot2)
# plot4: total US emissions from coal-combustion-related sources per year.
Merged <- merge(NEI,SCC,by.x = "SCC",by.y = "SCC")
# Select every EI.Sector level whose name mentions "Coal".
CoalNames <- unique(SCC$EI.Sector)[grep("Coal",unique(SCC$EI.Sector))]
CoalSources <- Merged[Merged$EI.Sector %in% CoalNames,]
coalyearsum <- tapply(CoalSources$Emissions,CoalSources$year,sum)
tcoalemissionsbytime <- data.frame(years = names(coalyearsum),emissions = coalyearsum)
png(filename = "plot4.png")
with(tcoalemissionsbytime,plot(years,emissions,type="n",main = "Total US Coal Emissions"))
lines(tcoalemissionsbytime$years,tcoalemissionsbytime$emissions)
dev.off()<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
# Exploratory scratch script: prototypes for plots 1-3; not part of the
# final deliverables.
yearsum <- tapply(NEI$Emissions,NEI$year,sum)
temissionsbytime <- data.frame(years = names(yearsum),emissions = yearsum)
with(temissionsbytime,plot(years,emissions,type="n",main = "Total US Emissions"))
lines(temissionsbytime$years,temissionsbytime$emissions)
baltimoreNEI <- subset(NEI,NEI$fips == "24510")
baltimoreyearsum <- tapply(baltimoreNEI$Emissions,baltimoreNEI$year,sum)
baltimoreemissionsbytime <- data.frame(years = names(baltimoreyearsum),emissions = baltimoreyearsum)
with(baltimoreemissionsbytime,plot(years,emissions,type="n", main = "Total Baltimore Emissions"))
lines(baltimoreemissionsbytime$years,baltimoreemissionsbytime$emissions)
# Sum Baltimore emissions by combined (type, year) keys.
baltimoreNEI$typeyear <- paste(baltimoreNEI$type,baltimoreNEI$year)
baltsums <- tapply(baltimoreNEI$Emissions,baltimoreNEI$typeyear,sum)
baltsumsframe <- data.frame(concat = names(baltsums), emissions = baltsums)
# Split the "type year" strings back into two columns.
categories <- data.frame(t(data.frame(strsplit(baltsumsframe$concat," "))))
categories <- categories %>% rename(type = X1, year = X2)
baltsumsframe <- cbind(categories,baltsumsframe)
ggplot(baltsumsframe,aes(x=year,y=emissions,color=type,group=type)) +
geom_point() +
geom_line()
library(dplyr)
library(ggplot2)
# Alternative wide-format approach (one column per source type).
NEISumbyType <- data.frame()
for (i in unique(baltimoreNEI$type)) {
NEIloopsubset <- subset(baltimoreNEI,baltimoreNEI$type == i)
NEIlooptypesum <- tapply(NEIloopsubset$Emissions,NEIloopsubset$year,sum)
NEISumbyType <- rbind(NEISumbyType,NEIlooptypesum)
}
names(NEISumbyType) <- names(baltimoreyearsum)
NEISumbyTypeTranspose <- data.frame(t(NEISumbyType))
names(NEISumbyTypeTranspose) <- unique(baltimoreNEI$type)
NEISumbyTypeTranspose <- cbind(NEISumbyTypeTranspose,names(yearsum))
NEISumbyTypeTranspose <- NEISumbyTypeTranspose %>% rename(years = `names(yearsum)`,ONROAD = 'ON-ROAD',NONROAD = 'NON-ROAD')
ggplot(NEISumbyTypeTranspose,aes(x=years, group = 1)) +
geom_line(aes(y=POINT),color="red") +
geom_line(aes(y=NONPOINT),color="blue") +
geom_line(aes(y=ONROAD),color="green") +
geom_line(aes(y=NONROAD),color="purple") +
# NOTE(review): `geom_lege` below is a truncated, invalid call; sourcing
# this scratch file as-is will fail here.
geom_lege
row.names(NEISumbyTypeTranspose)<file_sep>NEI <- readRDS("summarySCC_PM25.rds")
SCC <- readRDS("Source_Classification_Code.rds")
library(dplyr)
library(ggplot2)
# plot3: Baltimore emissions by source type over time (ggplot2).
baltimoreNEI <- subset(NEI,NEI$fips == "24510")
baltimoreNEI$typeyear <- paste(baltimoreNEI$type,baltimoreNEI$year)
baltsums <- tapply(baltimoreNEI$Emissions,baltimoreNEI$typeyear,sum)
baltsumsframe <- data.frame(concat = names(baltsums), emissions = baltsums)
# Split the "type year" strings back into separate columns.
categories <- data.frame(t(data.frame(strsplit(baltsumsframe$concat," "))))
categories <- categories %>% rename(type = X1, year = X2)
baltsumsframe <- cbind(categories,baltsumsframe)
png(filename = "plot3.png")
ggplot(baltsumsframe,aes(x=year,y=emissions,color=type,group=type)) +
geom_point() +
geom_line()
dev.off()
|
09775838466d0a3979d04039306c734ee2e45b82
|
[
"R"
] | 7 |
R
|
willheitman/ExData-Final-Project
|
7d57ddbf82f05cab56a62d063489c88a92d1d219
|
b28e5c656633a3e8008974ecd4a9223eba0b65a2
|
refs/heads/master
|
<repo_name>justin9503/Justin<file_sep>/ConsoleApp1/Program.cs
using System;
class GFG
{
    // Axis-aligned point; y grows upward in this coordinate system.
    class Point
    {
        public int x, y;
    }

    // Returns true when the two rectangles overlap. l1/l2 are the top-left
    // corners and r1/r2 the bottom-right corners of the rectangles.
    static bool doOverlap(Point l1, Point r1,
                          Point l2, Point r2)
    {
        // Separating-axis check: disjoint horizontally or vertically
        // means no overlap; otherwise the rectangles intersect.
        bool separatedHorizontally = l1.x > r2.x || l2.x > r1.x;
        bool separatedVertically = l1.y < r2.y || l2.y < r1.y;
        return !(separatedHorizontally || separatedVertically);
    }

    public static void Main()
    {
        // Sample rectangles: (0,10)-(10,0) and (5,5)-(15,0) overlap.
        Point l1 = new Point { x = 0, y = 10 };
        Point r1 = new Point { x = 10, y = 0 };
        Point l2 = new Point { x = 5, y = 5 };
        Point r2 = new Point { x = 15, y = 0 };

        Console.WriteLine(doOverlap(l1, r1, l2, r2)
            ? "Rectangles Overlap"
            : "Rectangles Do not Overlap");
    }
}
|
7e3d55d5863a51a81bba44c94cdf1b2b2e113360
|
[
"C#"
] | 1 |
C#
|
justin9503/Justin
|
74eb9cce0eb7b2836342e62dd9c976a936e1e588
|
66682616fb44f53b5f107af84f6dea5eeb6f90e5
|
refs/heads/master
|
<file_sep>package hm.moe.lws2d.chargefoodmod;
import java.util.HashMap;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.EnumAction;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.stats.StatList;
import net.minecraft.world.World;
public class ItemChargeFood extends FPRootPoweredItem {

    // Food points credited per consumed item, keyed by unlocalized item name.
    // NOTE(review): values appear to mirror vanilla hunger values -- verify
    // against the targeted Minecraft version.
    private HashMap<String, Integer> map = new HashMap<String, Integer>(){
        {
            put("item.apple", 4);
            put("item.bread", 5);
            put("item.porkchopRaw", 3);
            put("item.porkchopCooked", 8);
            put("item.cookie", 2);
            put("item.melon", 2);
            put("item.beefRaw", 3);
            put("item.beefCooked", 8);
            put("item.chickenRaw", 2);
            put("item.chickenCooked", 6);
            put("item.rottenFlesh", 4);
            put("item.spiderEye", 2);
            put("item.potatoBaked", 5);
            put("item.potatoPoisonous", 2);
            //put("item.carrotGolden", 6);
            put("item.pumpkinPie", 8);
            put("item.rabbitRaw", 3);
            put("item.rabbitCooked", 5);
            put("item.muttonRaw", 2);
            put("item.muttonCooked", 6);
            put("item.mushroomStew", 6);
            put("item.rabbitStew", 10);
            //put("item.appleGold", 4);
            put("item.carrots", 3);
            put("item.potato", 1);
            put("item.fish.pufferfish.raw", 1);
            put("item.fish.clownfish.raw", 1);
            put("item.fish.salmon.raw", 2);
            put("item.fish.cod.raw", 2);
            put("item.fish.salmon.cooked", 6);
            put("item.fish.cod.cooked", 5);
        }
    };

    // Internal power capacity; 10 power units correspond to 1 food point.
    public static int powerCapacity = 100000;

    // par2 = saturation modifier, par3 = whether wolves can eat it.
    public ItemChargeFood(float par2, boolean par3)
    {
        super(powerCapacity, par2, par3);
        this.setUnlocalizedName("chargeFood"); // register the internal (system) name
        this.setMaxStackSize(1); // maximum stack size
    }

    @Override
    public EnumAction getItemUseAction(ItemStack var1)
    {
        return EnumAction.DRINK;
    }

    // Called when the drink animation completes: refills the hunger bar
    // if enough stored power is available.
    @Override
    public ItemStack onItemUseFinish(ItemStack stack, World worldIn, EntityPlayer playerIn)
    {
        // Food points missing from the hunger bar (max level is 20).
        float heals = 20 - playerIn.getFoodStats().getFoodLevel();
        System.out.println("heals : " + heals);
        if(super.getFPCurrentPower(stack)>=heals*10){
            usePower(heals*10, stack);
            playerIn.getFoodStats().addStats((int)heals, this.getSaturationModifier(stack));
            worldIn.playSoundAtEntity(playerIn, "random.burp", 0.5F, worldIn.rand.nextFloat() * 0.1F + 0.9F);
            this.onFoodEaten(stack, worldIn, playerIn);
            playerIn.triggerAchievement(StatList.objectUseStats[Item.getIdFromItem(this)]);
        }
        return stack;
    }

    // Sneak + right click: consume edible items from the inventory and store
    // their food value as power. Plain right click: start drinking when at
    // least one food point (10 power) is stored.
    @Override
    public ItemStack onItemRightClick(ItemStack itemStackIn, World worldIn, EntityPlayer playerIn)
    {
        double currentPower = super.getFPCurrentPower(itemStackIn);
        if( playerIn.isSneaking()){
            double maxPower = super.getFPMaxPower(itemStackIn);
            if(currentPower < maxPower){
                if (playerIn != null && playerIn.inventory instanceof IInventory) {
                    IInventory inv = (IInventory)playerIn.inventory;
                    int charges = 0;
                    for(int i=0;i < inv.getSizeInventory(); i++){
                        ItemStack item = inv.getStackInSlot(i);
                        if(item!=null){
                            System.out.println(item.getUnlocalizedName());
                            if(map.get(item.getUnlocalizedName()) != null){
                                if (item.getItem().getItemUseAction(null) == EnumAction.EAT && map.get(item.getUnlocalizedName()) > 0) {
                                    // The whole stack is consumed at once.
                                    charges += item.stackSize * map.get(item.getUnlocalizedName());
                                    inv.setInventorySlotContents(i, null);
                                    // NOTE(review): stops once the cap would be
                                    // exceeded; assumes chargePower clamps at
                                    // maxPower -- confirm in FPRootPoweredItem.
                                    if(currentPower + charges*10 > maxPower) break;
                                }
                            }
                        }
                    }
                    if(charges == 0){
                        playerIn.addChatMessage(PlayerMessages.noFood.get());
                        return itemStackIn;
                    }
                    this.chargePower(charges * 10, itemStackIn);
                    worldIn.playSoundAtEntity(playerIn, "random.burp", 0.5F, worldIn.rand.nextFloat() * 0.1F + 0.9F);
                }
            }
        }
        else{
            if (playerIn.canEat(false) && currentPower >= 10)
            {
                playerIn.setItemInUse(itemStackIn, this.getMaxItemUseDuration(itemStackIn));
            }
        }
        return itemStackIn;
    }

    // Drains `amount` power; true when (almost) all of it could be drained.
    public boolean usePower(final float amount, final ItemStack is )
    {
        return this.extractFPPower( is, amount ) >= amount - 0.5;
    }

    // Injects `amount` power; returns how much was actually stored.
    public double chargePower(final float amount, final ItemStack is )
    {
        return this.injectFPPower( is, amount );
    }
}<file_sep># ChargeFoodMod
## Download and How to Use
* [Website](http://dmwebq.web.fc2.com/mc/chargeablefood.html)<file_sep>package hm.moe.lws2d.chargefoodmod;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.IChatComponent;
public enum PlayerMessages
{
    // Shown when recharging is attempted with no eligible food in inventory.
    noFood;

    // Builds the translatable chat component for this message.
    public IChatComponent get()
    {
        return new ChatComponentTranslation( this.getName() );
    }

    // Translation key: "chat." + enum constant name.
    String getName()
    {
        return "chat." + this.toString();
    }
}<file_sep>package hm.moe.lws2d.chargefoodmod;
import net.minecraft.client.resources.model.ModelResourceLocation;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraftforge.client.model.ModelLoader;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.Mod.EventHandler;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.registry.GameRegistry;
@Mod(modid=ChargeFoodCore.MOD_ID, name="ChargeFoodMod", version="1.0")
public class ChargeFoodCore
{
    public static final String MOD_ID = "chargefoodmod";

    // Items added by this mod.
    public static Item chargeFood;
    public static Item mixingModule;
    public static Item subspaceModule;

    // Pre-init: construct and register the items, bind client-side models.
    @EventHandler
    public void preInit(FMLPreInitializationEvent event)
    {
        // Create the item instances. Constructor args: saturation modifier
        // and whether wolves can eat it.
        chargeFood = new ItemChargeFood(0.8F, false); //0.8F equals beef's value
        mixingModule = new Item()
        .setCreativeTab(CreativeTabs.tabMaterials)
        .setUnlocalizedName("mixingModule")
        .setMaxStackSize(64);
        subspaceModule = new Item()
        .setCreativeTab(CreativeTabs.tabMaterials)
        .setUnlocalizedName("subspaceModule")
        .setMaxStackSize(64);

        GameRegistry.registerItem(chargeFood, "chargeFood");
        GameRegistry.registerItem(mixingModule, "mixingModule");
        GameRegistry.registerItem(subspaceModule, "subspaceModule");

        // Client side only: bind inventory models to the items.
        if(event.getSide().isClient()){
            ModelLoader.setCustomModelResourceLocation(chargeFood, 0, new ModelResourceLocation(MOD_ID + ":chargefood", "inventory"));
            ModelLoader.setCustomModelResourceLocation(mixingModule, 0, new ModelResourceLocation(MOD_ID + ":mixingmodule", "inventory"));
            ModelLoader.setCustomModelResourceLocation(subspaceModule, 0, new ModelResourceLocation(MOD_ID + ":subspacemodule", "inventory"));
        }
    }

    // Init: register the shaped crafting recipes for the three items.
    @EventHandler
    public void init(FMLInitializationEvent event)
    {
        GameRegistry.addRecipe(new ItemStack(chargeFood),
        " M ",
        "LSL",
        "IMI",
        'M', mixingModule,
        'L', Items.leather,
        'S', subspaceModule,
        'I', Blocks.ice
        );
        GameRegistry.addRecipe(new ItemStack(mixingModule),
        "IDI",
        "iFd",
        "CAC",
        'I', Items.iron_ingot,
        'D', Items.diamond,
        'i', Items.iron_sword,
        'F', Items.flint,
        'd', Items.diamond_sword,
        'C', Blocks.cobblestone,
        'A', Items.diamond_axe
        );
        GameRegistry.addRecipe(new ItemStack(subspaceModule),
        "PEP",
        "OCO",
        "YCY",
        'P', Items.ender_pearl,
        'E', Blocks.ender_chest,
        'O', Blocks.obsidian,
        'C', Blocks.chest,
        'Y', Items.ender_eye
        );
    }
}
c20d740cf39a6c13f51f2805edc7173e1e889c2f
|
[
"Markdown",
"Java"
] | 4 |
Java
|
Lways/ChargeFoodMod
|
c95d7123d4ab601211bc22b495bdd651f8a2f9a2
|
a4d4a5d28596fcf311bc26c17fd9017d0f7895be
|
refs/heads/master
|
<file_sep># Divides num1 by num2 (Integer#/ truncates for integer operands).
def division(num1, num2)
  return num1 / num2
end

# Echoes back whatever argument it receives.
def argue(thing)
  return thing
end

# Lab stub: accepts a greeting and a name but has no body, so it returns nil.
def greeting(greet, name)
end

def return_a_value
  "Nice"
end

# Defaults to "cheese" when no topping is given.
def pizza_party(arg = "cheese")
  arg
end

# Lower-cases the given string.
def whisper(what)
  what.downcase
end
|
48a2df3130d08786a0bb6f634b85f6cbf7405574
|
[
"Ruby"
] | 1 |
Ruby
|
dive0/01-ruby-basics-lab
|
1f7cdcff664e7b000d7716f898542c68d2e02abf
|
4e91e450534268c6e4e674cba66fe6aa4b8a83cc
|
refs/heads/master
|
<repo_name>brianlens/taste-of-code-rss-reader<file_sep>/README.md
## Outline the Project
Today we will be building a RSS feed reader, using HTML, CSS and jQuery.
Before we start, it's always helpful to first outline what you want your project to do/achieve.
* Display a list of some of your favorites news sites, along with its square logo
* Display links to the last 5 articles for each site
* Open these articles in a different window when clicked on
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/07-fine-tuning-01.png)
## HTML
HTML is the language of the web, used for writing web pages. Any website you have ever visited was made with HTML. The language started off as a pure text-based document format. With each new version, HTML is adapted to the new requirements of the web.
```html
<!DOCTYPE html>
<html>
<head>
<title>Taste of Code</title>
</head>
<body>
<h1>Agenda</h1>
<p>On the menu today:</p>
<ul>
<li>Structure with HTML</li>
<li>Styling with CSS</li>
<li>Happiness with Lunch</li>
</ul>
</body>
</html>
```
Let's explain this example one _tag_ at a time:
* `DOCTYPE` defines the document type to be HTML
* `<html>` describes an HTML document
* `<head>` provides information about the document
* `<title>` provides a title for the document
* `<body>` describes the visible page content
* `<h1>` describes a heading
* `<p>` describes a paragraph
* `<ul>` describes an unordered list
* `<li>` describes a list item
#### HTML Tags
HTML tags are **keywords** (tag names) surrounded by **angle brackets**:
```html
<tagname>building my first RSS reader</tagname>
```
* HTML tags normally come **in pairs** like `<p>` and `</p>`.
* The first tag in a pair is the **opening tag**, the second tag is the **closing tag**.
* The closing tag is written like the starting tag, but with a **slash** before the tag name.
#### ✎ Exercise: Your Very First HTML
The first step is to begin creating our project. Go ahead and make a new folder -- name it how you wish -- and add a new `index.html` file. We will be writing our first HTML here.
Apply everything you learnt until now and make your browser render a page similar to this one -- feel free to select any of your favorite news sites -- :
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/01-html-01.png)
## CSS
CSS is a stylesheet language that describes the presentation of an HTML document, defining how elements must be rendered on screen, on paper, or in other media.
With CSS you can control the color of the text, the style of fonts, the spacing between paragraphs, how columns are sized and laid out, what background images or colors are used, layout designs, variations in display for different devices and screen sizes as well as a variety of other effects.
```html
<!DOCTYPE html>
<html>
<head>
<title>Taste of Code</title>
<style>
h1 {
color: red;
}
</style>
</head>
<body>
<h1>Agenda</h1>
...
</body>
</html>
```
Any style rule is made of three parts:
* **Selector** − Any HTML tag (like `<h1>` or `<table>`) at which a style will be applied.
* **Property** - A type of attribute of HTML tag. They could be color, border etc.
* **Value** - Assigned to properties. For example, `color` property can have a _red_ value.
You could write any CSS Style Rule Syntax like this:
```css
selector { property: value }
```
#### ✎ Exercise: Background color
Change the background-color of the HTML page to green.
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/02-css-01.png)
#### CSS Selectors
You can select elements in your HTML by its **tag name** (this is the same example we have seen above):
```css
h1 {
color: red;
}
```
You can also select elements in your HTML by its **class name**:
```css
.warning {
color: red;
}
```
Check out a list of CSS properties [here](http://www.w3schools.com/cssref/default.asp).
#### ✎ Exercise: Warning
Apply the warning class to the main heading.
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/02-css-02.png)
#### Linking to a CSS file from an HTML page
Use the `<link>` HTML tag to link to any CSS file. Its `href` attribute tells the browser where the CSS file is stored. Note that it is added within the `<head>` tag in your HTML file.
```html
<!DOCTYPE html>
<html>
<head>
<title>RSS Personal Reader</title>
<link rel="stylesheet" type="text/css" href="css/styles.css">
</head>
<body>
<!-- … -->
</body>
</html>
```
#### ✎ Exercise: Advanced Styling
Go ahead and apply everything you now know about CSS and do some chic styling in your project. Need inspiration when choosing colors? Check the ones in [this collection](http://www.lolcolors.com/).
If you still don't have anything in mind, you could try to do something like this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/02-css-03.png)
If you want to challenge yourself, add some gradients to the background colors:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/02-css-04.png)
## JavaScript
JavaScript is used in browsers to make websites more interactive, interesting and user-friendly. Using JavaScript you can access and modify the content and markup used in a web page while it is being viewed in the browser. And pages can also respond to what the user does.
You might not know yet, but JavaScript lives already in every browser.
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/03-js-01.png)
To open the Console tab in Chrome, do one of the following:
* Use the keyboard shortcut `Command` + `Option` + `J` (Mac) or `Control` + `Shift` + `J` (Windows/Linux).
* Select Chrome Menu > More Tools > JavaScript Console.
#### ✎ Exercise: JavaScript Maths
Open the JavaScript console in your browser and use it to do some calculations, like `10 + 5` or `123456 / 2`.
#### How JavaScript makes web pages more interactive
**1) Access Content:** Select any element, attribute or text from an HTML page. For example, you can select any elements that have a class attribute with a value of `warning`.
**2) Modify Content:** Add elements, attributes and text to the page, or remove them. For example, you can add a paragraph of text after the first `<h1>` element.
**3) Program Rules or Instructions:** Specify a set of steps for the browser to follow (like a recipe), which allows it to access or change the content of a page. For example, you can write an RSS feed script that checks which link a user clicked on and redirects them to the article page.
**4) React to Events:** Specify that a script should run when a specific event has occurred. For example, it could be run when a button is pressed or a web page has finished loading.
Before you learn how to read and write JavaScript itself, you need to become familiar with some key concepts in computer programming.
#### Objects (things) & Properties (characteristics)
In computer programming, each thing in the world can be represented as an object. Each object can have its own **properties** and **methods**.
Let's look at an example in the console:
The browser represents each window or tab using a `window` object. The `location` property of the `window` object will tell you the URL of the current page.
```javascript
window.location;
```
We can also use the `window` object to create an alert box with some text by using the `.alert()` method. Methods represent things people need to do with objects.
```javascript
window.alert('Hello');
```
We can use the `document` object to get the `title` of the page. The `title` property of the `document` object tells you what is between the opening `<title>` and closing `</title>` tag for that web page.
```javascript
document.title;
```
Or to print something into the `document` or web page:
```javascript
document.write('Nice to see you!');
```
* The `document` object represents the entire web page. All web browsers implement this object, and you can use it just by giving its name.
* The `write()` method of the `document` object allows new content to be written into the page where the `<script>` element sits.
* Whenever a method requires some information in order to work, the data is given inside the parentheses. Each piece of information is called a `parameter` of the method. In this case, the `write()` method needs to know what to write in to the page.
#### ✎ Exercise: Exploring the Console
Using the JavaScript console on your browser try some of these:
* Open up an alert box in your web page.
* Inspect the title of your web page.
* Modify the content of your web page.
#### What is a Variable?
A script will have to temporarily store the bits of information it needs to do its job. It can store this data in **variables**.
Variables can be thought of as named containers. You can place data into these containers and then refer to the data simply by naming the container.
In the following example you would use a variable to remember the `sum` value:
```javascript
var sum = 10 + 5;
```
You can also use variables to store a **string**:
```javascript
var greeting = "Good morning";
```
Or **boolean** data, which can have one of two values: `true` or `false`.
```javascript
var buildingFeedReaderAtNOS = true;
```
#### ✎ Exercise: Storing feed urls in variables
Visit the site [NEDERLANDSE RSS FEEDS](http://www.nationalemediasite.nl/rss-feeds.php) and select 4 sites that you want to follow. Find out each of the feed URLs for these sites and store each of them in a variable.
This is how an example with NOS feed URL would look like:
```javascript
var nosUrl = "http://feeds.nos.nl/nosjournaal";
```
#### Creating your first script
A script is a series of instructions that a computer can follow step-by-step to achieve a goal. To write a script, you need to:
* first state your goal
* and then list the tasks that need to be completed to achieve it
Start with the big picture of what you want to achieve, and break that down into smaller steps.
This script adds a greeting into an HTML page. The greeting changes depending on the time of the day.
```javascript
// js/greeting.js
var today = new Date();
var hourNow = today.getHours();
var greeting;
if (hourNow > 18) {
greeting = 'Good evening!';
} else if (hourNow > 12) {
greeting = 'Good afternoon!';
} else if (hourNow > 0) {
greeting = 'Good morning!';
} else {
greeting = 'Welcome!';
}
document.write('<h3>' + greeting + '</h3>');
```
#### Linking to a JavaScript file from an HTML page
When you want to use a JavaScript with a web page, you use the HTML `<script>` element to tell the browser that is coming across a script. Its `src` attribute tells people where the JavaScript file is stored.
JavaScript runs where it is found in the HTML. When the browser comes across a `<script>` element, it stops to load the script and then checks to see if it needs to do anything.
Note that the location of the `greeting.js` script affects where the new greeting is written into the page.
```html
<!-- index.html -->
<!DOCTYPE html>
<html>
<head>
<title>RSS Personal Reader</title>
<link rel="stylesheet" type="text/css" href="css/styles.css">
</head>
<body>
<script src="js/greeting.js"></script>
<h1 class="title"> RSS Personal Reader </h1>
<!-- ... -->
</body>
</html>
```
#### ✎ Exercise: Greeting with JavaScript
Add any message you wish at the top of your web page depending on the day time.
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/03-js-02.png)
#### What is an Object
You have already learned that JavaScript variables are containers for data values, where we can store any data we wish.
Objects group together a set of variables to create a model of something you would recognize from the real world. In an object, variables become known as properties.
Example of an object could be a library where a collection of feed URLs are stored categorized by site.
```javascript
var feedsLibrary = {
nos: "http://feeds.nos.nl/nosjournaal"
}
```
Every property in an object has a name (programmers call it **key**) and a **value**. In our previous example, `nos` is the key of the property and `http://feeds.nos.nl/nosjournaal` is its value.
#### Access & update text / markup with innerHTML
You can select any element in the HTML by selecting its **class** or **id**.
Let's add an id `nos` to the unordered list and reference a new JavaScript file `rss.js` at the end of the `<body>`:
```html
<!-- index.html -->
<!DOCTYPE html>
<html>
<head>
<title>RSS Personal Reader</title>
<link rel="stylesheet" type="text/css" href="css/styles.css">
</head>
<body>
<script src="js/greeting.js"></script>
<h1 class="main-title"> RSS Personal Reader </h1>
<h3 class="site-title">NOS Nieuws</h3>
<ul id="nos"></ul>
<script src="js/rss.js"></script>
</body>
</html>
```
And let's look at how to show the `nosUrl` value within the `nos` unordered list:
```javascript
// rss.js
var nosUrl = "http://feeds.nos.nl/nosjournaal";
var feedsLibrary = {
nos: nosUrl
};
var ulNos = document.getElementById('nos');
ulNos.innerHTML = '<a href="' + feedsLibrary.nos + '">' + feedsLibrary.nos +'</a>';
```
#### ✎ Exercise: Rendering feed URLs with JavaScript
Go ahead and show each of the feed URLs stored in the `feedLibrary` object under its corresponding unordered list. Your web page should end up looking something like this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/03-js-03.png)
## jQuery
[jQuery](https://jquery.com/) makes writing JavaScript a lot easier. It offers a simple way to achieve a variety of common JavaScript tasks quickly and consistently, across all major browsers.
#### Find elements using CSS-style selectors
jQuery lets you find elements using CSS-style selectors and then do something with the elements using jQuery methods.
```javascript
// javascript
var ulNos = document.getElementById('nos');
ulNos.innerHTML = '<a href="' + feedsLibrary.nos + '">NOS Feeds</a>';
//jQuery
var ulNos = $('ul#nos');
ulNos.html('<a href="' + feedsLibrary.nos + '">NOS Feeds</a>');
```
#### Do something with the elements using jQuery methods
The jQuery object has many methods that you can use to work with the elements you select. The methods represent tasks that you commonly need to perform with elements.
Example of how to add a class to a HTML element:
```javascript
$('ul#nos').addClass('complete');
```
#### Include jQuery in your page
In order to use jQuery, the first thing you need to do is include the jQuery script in your page. You can see in the example below that it is included just before the closing `</body>` tag, after the page content. Once jQuery has been added to the page, you can include any JavaScript file that uses jQuery selectors and methods to update the content of the HTML page.
```html
<!DOCTYPE html>
<html>
<head>
<title>RSS Personal Reader</title>
<link rel="stylesheet" type="text/css" href="css/styles.css">
</head>
<body>
<!-- ... -->
<script src="http://code.jquery.com/jquery-1.12.4.min.js"></script>
<script src="js/rss.js"></script>
</body>
</html>
```
#### ✎ Exercise: Adding jQuery and updating the syntax
Go ahead and apply what you learnt from jQuery by adding it to your web page and rewriting the `rss.js` file with the new syntax.
#### Working with each property in an object
jQuery allows you to recreate the functionality of a loop through the properties of an object, using the `.each()` method.
There are times when you will want to loop through a collection of properties. Often this will be to perform a _series_ of actions on each of the elements. The `.each()` method is provided for this purpose.
At this moment we are repeating the same action on each of the properties contained in the `feedsLibrary` object:
```javascript
var ulNos = $('#nos');
ulNos.html('<a href="' + feedsLibrary.nos + '">NOS Feeds</a>');
var ulTelegraaf = $('#telegraaf');
ulTelegraaf.html('<a href="' + feedsLibrary.telegraaf + '">Telegraaf Feeds</a>');
var ulVolkskrant = $('#volkskrant');
ulVolkskrant.html('<a href="' + feedsLibrary.volkskrant + '">Volkskrant Feeds</a>');
var ulNrc = $('#nrc');
ulNrc.html('<a href="' + feedsLibrary.nrc + '">NRC Feeds</a>');
```
Let's see how we could write this without repeating ourselves by using the `.each()` method:
```javascript
var feedsLibrary = {
nos: "http://feeds.nos.nl/nosjournaal",
telegraaf: "http://www.telegraaf.nl/rss/",
volkskrant: "http://www.volkskrant.nl/nieuws/rss.xml",
nrc: "http://www.nrc.nl/rss/",
};
$.each(feedsLibrary, function(key, value) {
var ulElement = $('#' + key);
ulElement.html('<a href="' + value + '">' + value +'</a>');
});
```
The first parameter of the `.each()` method is the object through which we want to loop and the second is a **function**. Functions let you group a series of statements together to perform a specific task.
In this case we are defining a function that first selects an element in the HTML and afterwards places a link inside that element.
#### ✎ Exercise: Looping through feedsLibrary
Use the `.each()` method to loop through the `feedsLibrary` and render the feed urls in your web page. You web page should end up looking the same.
## Yahoo Query Language (YQL)
The [YQL](https://developer.yahoo.com/yql/) (Yahoo! Query Language) platform enables you to query, filter, and combine data across the web through a single interface. It uses a **SQL-like syntax** that is expressive enough for getting the right data.
_(We won't be going deep into SQL in this tutorial. It is enough for now that you know that SQL is just a special-purpose programming language designed for managing data stored in databases.)_
#### Exploring YQL
YQL is ridiculously easy to work with. In our case, we'll use it for a very simple purpose: grab the RSS feed for each of the sites, passing them through the query string. We will be using the JSON form of the data.
Check in the image below how it would look like for the case of NOS:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/05-yql-01.png)
Do you see that **endpoint** at the bottom of the page? That's the URL where we need to go to retrieve our feeds. That data is stored somewhere in the world within a web server.
Let's go and visit that URL address:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/05-yql-02.png)
What we see there it's an **object** in JSON form, from which we are specially interested in grabbing the values of the **link** and **title** of each of the items.
To query an RSS feed, we will be using the following query string:
```sql
SELECT * FROM feed WHERE url="path/to/rss/feed" LIMIT 5
```
Remember that we are storing our paths to the rss feeds in the `feedsLibrary` object:
```javascript
var feedsLibrary = {
nos: "http://feeds.nos.nl/nosjournaal",
telegraaf: "http://www.telegraaf.nl/rss/",
volkskrant: "http://www.volkskrant.nl/nieuws/rss.xml",
nrc: "http://www.nrc.nl/rss/",
};
```
#### ✎ Exercise: Visiting the Endpoints
Check out the different endpoints for the sites that you stored under the `feedsLibrary`.
## Ajax
The browser requests information from a web server using **Ajax**. It then processes the server's response and shows it within the page.
#### How Ajax Works
**1) The Request**
The browser requests information/data from the server.
**2) On the Server**
When there is an Ajax request, the server might send back HTML, or it might send data in a different format such as JSON (in the case of YQL) or XML (which the browser turns into HTML).
**3) The Response**
The browser processes the content and adds it to the page. When the server has finished responding to the request, the browser will fire an event that is used to trigger a JavaScript function that will process the data and incorporate it into the page.
#### Building the path for the request
This is how the URL address (endpoint) from where we will be requesting the data looks like:
```
https://query.yahooapis.com/v1/public/yql?q=SELECT%20*%20FROM%20feed%20WHERE%20url%3D%27http%3A%2F%2Ffeeds.nos.nl%2Fnosjournaal%27%20LIMIT%205&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys
```
We need to build up that URL address for each of the sites in our `rss.js` file. For the sake of readability, we will be building up our YQL query in sections, and we need to make sure that we url encode the query to replace any special characters:
```javascript
// js/rss.js
var baseUrl = "http://query.yahooapis.com/v1/public/yql?q=";
var queryString = encodeURI("SELECT * FROM feed WHERE url='" + nosUrl + "' LIMIT 5");
var format = "&format=json";
var rssFeedPath = baseUrl.concat(queryString, format);
```
#### ✎ Exercise: Printing out the rss feeds paths
Apply everything you know of YQL and Ajax and print out the path for each of the sites that you are following in the JavaScript console.
The result should look like something similar to this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/06-ajax-02.png)
#### Handling Ajax Requests & Responses
jQuery provides several methods that handle Ajax requests. The process involves two steps: making a request and handling the response.
In this case we will need to load JSON data from a web server and the `.getJSON()` method is the one that helps us doing so.
```javascript
$.getJSON(url, function(response) {
// Do something with the response
});
```
* `url` specifies where the data is fetched from
* `function(response) {}` indicates the function to be called when data is returned
#### ✎ Exercise: Printing out the response in the console
Using the .getJSON() method, print out the value of the response coming from the web server into the JavaScript console for each of the rss feeds paths.
The result should look like something similar to this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/06-ajax-03.png)
When you are done with it, go ahead and print out `response.query`, `response.query.results` and `response.query.results.item`.
#### Retrieving articles title and link
When we print out `response.query.results.item` what we get is an **array** of objects.
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/06-ajax-04.png)
Arrays are another way of storing data as a collection. In this case what we get is a collection of articles. We can access the data for each of those articles (or `feedItems` ;) ) as follows:
```javascript
var feedItems = response.query.results.item;
// First item data
link1 = feedItems[0].link;
title1 = feedItems[0].title;
// Second item data
link2 = feedItems[1].link;
title2 = feedItems[1].title;
// Third item data
link3 = feedItems[2].link;
title3 = feedItems[2].title;
// ...
```
`[0]`, `[1]` and`[2]` are the **index** of the elements, which represents the position of the element within the array.
#### ✎ Exercise: Printing out the articles links and titles in the console
Go ahead and print out each of the articles links and titles in the console. The result should look like something similar to this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/06-ajax-05.png)
It looks like we are repeating the same action over each of the items in the `feedItems` collection. Do you remember the `.each()` method? That's what we used to loop through the `feedsLibrary` object. It looks like we might need something similar in this case...
This is how you can use the `.each()` method to loop through an array:
```javascript
$.each(itemsCollection, function(index) {
var item = itemsCollection[index];
// Do something with the item
});
```
#### ✎ Exercise: Rendering the articles links and titles in the web page
After all you learnt up until now, you should be able to render a list item including a link to each of the articles under each site. Make it so that every link opens in a new tab in your browser.
Give it a try! The result should look like something similar to this:
[](https://raw.githubusercontent.com/Codaisseur/taste-of-code-rss-reader/master/screenshots/06-ajax-06.png)
Reading [this](http://api.jquery.com/append/) will give you an idea about how to _append_ a list item to an existing unordered list.
When you are done with it, go ahead, be creative and make every last change in your web page that you wish to make it look super awesome!
<file_sep>/code/01-html/index.html
<!DOCTYPE html>
<html>
<head>
  <title>RSS Personal Reader</title>
</head>
<body>
  <!-- Page heading shown above the list of followed news sites. -->
  <h1> RSS Personal Reader </h1>
  <!-- Static list of sites; later tutorial steps give each site its own
       <ul> with an id so JavaScript can inject feed links into it. -->
  <ul>
    <li>NOS Nieuws</li>
    <li>Telegraaf Nieuws</li>
    <li>Volkskrant Nieuws</li>
    <li>NRC Nieuws</li>
  </ul>
  <!-- Footer credit line. -->
  <h4> Made with ❤ at NOS Hilversum on June 2016 </h4>
</body>
</html>
<file_sep>/code/03-js/js/rss.js
// js/rss.js
// Renders one feed link inside each site's <ul>.  The original repeated the
// same getElementById + innerHTML statement four times; this version keeps
// identical output but builds it in a single loop (DRY).

// Feed URL for each followed site.
var nosUrl = "http://feeds.nos.nl/nosjournaal";
var telegraafUrl = "http://www.telegraaf.nl/rss/";
var volkskrantUrl = "http://www.volkskrant.nl/nieuws/rss.xml";
var nrcUrl = "http://www.nrc.nl/rss/";

// Keys must match the id attribute of the corresponding <ul> in index.html.
var feedsLibrary = {
  nos: nosUrl,
  telegraaf: telegraafUrl,
  volkskrant: volkskrantUrl,
  nrc: nrcUrl,
};

// For every site, write a link to its feed URL into the matching <ul>.
for (var key in feedsLibrary) {
  if (feedsLibrary.hasOwnProperty(key)) {
    var url = feedsLibrary[key];
    var ulElement = document.getElementById(key);
    if (ulElement) {
      // Same markup as the original four statements produced.
      ulElement.innerHTML =
        '<a href="' + url + '" target="_blank">' + url + '</a>';
    }
  }
}
|
123a38f8b1a2cf15d65316a7014d38332d8a2c5c
|
[
"Markdown",
"JavaScript",
"HTML"
] | 3 |
Markdown
|
brianlens/taste-of-code-rss-reader
|
3f544d14466fd6c1a65818a6f7d923be04d2b086
|
a6bc9b0eb648c3b9d8b860bed0369bd0a449dd46
|
refs/heads/master
|
<file_sep>INSERT INTO user(userid, username, password, role, firstname, middlename, lastname, phonenumber, email, street, city, zip, state, country, unit_number)
VALUES (1, 'thumpthump', 'password', 'USER', 'Randy', null, 'Calderon', '901-333-2014', '<EMAIL>', '6333 Brick Avenue', 'Burbank', 91606, 'CA', 'USA', null),
(2, 'jimmyj', 'password', 'USER', 'Jimmy', 'Alex', 'Sanders', '802-345-1930', '<EMAIL>', '1020 Bermuda Road', 'Houston', 70129, 'TX', 'USA', null),
(3, 'metax', 'password', 'USER', 'Rob', null, 'Robinson', '740-232-3890', '<EMAIL>', '8327 Jersey Lane', 'Newport', 008859, 'NJ', 'USA', null);
INSERT INTO supplier(supplierid,suppliername,productname,quantity)
VALUES (1, 'Kibbles Inc','Kibbles n Bits', 400),
(2, 'Purina Inc', 'Purina One SmartBlend', 300),
(3, 'Chapstick Inc', 'Chapstick Moisturizing Lip Balm 3ct', 1000),
(4, 'Pop Secret Inc', 'Pop Secret Premium Popcorn 30ct', 700);
INSERT INTO product(productid, productname, description, quantity, price)
VALUES (1, 'Kibbles n Bits', 'Chicken Flavors Dry Dog 50lb.', 30, 24.99),
(2, 'Purina One SmartBlend', 'Real Salmon & Tuna Adult Dry Dog food 27.5lb', 15, 32.99),
(3, 'Chapstick Moisturizing Lip Balm 3ct', 'Help heat and protect your lips', 40, 2.99),
(4, 'Pop Secret Premium Popcorn 30ct', 'Movie theater butter', 25, 12.99);
INSERT INTO orders(orderid, userid, productname, quantity, productid)
VALUES (1, 1,'Chapstick Moisturizing Lip Balm 3ct', 2, 3),
(2, 3,'Pop Secret Premium Popcorn 30ct', 3, 4),
(3, 2,'Purina One SmartBlend', 5, 2),
(4, 3,'Kibbles n Bits', 8, 1);
INSERT INTO supplier_products(productid, supplierid)
VALUES (1, 1),
(2, 2),
(3, 3),
(4, 4);
-- INSERT INTO cart(userid, productid, quantity)
<file_sep>package com.randyc.shoppingcart.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import javax.persistence.*;
import java.util.HashSet;
import java.util.Set;
/**
 * JPA entity mapped to the {@code supplier} table.
 *
 * <p>Holds the supplier's name, a product name, and a stock quantity, plus a
 * many-to-many association to {@link Product} through the
 * {@code supplier_products} join table.</p>
 */
@Entity
@Table(name = "supplier")
public class Supplier {

    /** Primary key; generated by the database (IDENTITY strategy). */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private long supplierid;

    /** Display name of the supplier. */
    private String suppliername;

    /** Name of the product this supplier provides. */
    private String productname;

    /** Quantity the supplier has available. */
    private int quantity;

    // Many to Many for supplier_products table join.
    // @JsonIgnoreProperties("suppliers") breaks the serialization cycle when
    // Product serializes its suppliers back to this entity.
    @ManyToMany
    @JoinTable(name = "supplier_products",
            joinColumns = {@JoinColumn(name="supplierid")}, inverseJoinColumns = {@JoinColumn(name="productid")})
    @JsonIgnoreProperties("suppliers")
    private Set<Product> products = new HashSet<>();

    /** No-arg constructor required by JPA. */
    public Supplier() {}

    public long getSupplierid() {
        return supplierid;
    }

    public void setSupplierid(long supplierid) {
        this.supplierid = supplierid;
    }

    public String getSuppliername() {
        return suppliername;
    }

    public void setSuppliername(String suppliername) {
        this.suppliername = suppliername;
    }

    public String getProductname() {
        return productname;
    }

    public void setProductname(String productname) {
        this.productname = productname;
    }

    public int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }

    public Set<Product> getProducts() {
        return products;
    }

    public void setProducts(Set<Product> products) {
        this.products = products;
    }
}
<file_sep>package com.randyc.shoppingcart.repository;
import com.randyc.shoppingcart.models.Supplier;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@link Supplier} entities.
 * Inherits standard CRUD and paging operations from {@link JpaRepository};
 * no custom query methods are declared.
 */
public interface SupplierRepository extends JpaRepository<Supplier, Long> {
}
|
48c235d7751966ecf8616865884def8ced67036c
|
[
"Java",
"SQL"
] | 3 |
SQL
|
RandyCalderon/java-shoppingcart
|
b27034072edc92aedd82f30e6ca8ef956863f83e
|
a326e1b57ec5a7539f21c1ed6642a871ad0f02d3
|
refs/heads/master
|
<repo_name>Nikitae57/Memby<file_sep>/app/src/main/java/ru/nikitae57/englisher/layout/dictionary/DictionaryViewHolder.kt
package ru.nikitae57.englisher.layout.dictionary
import android.support.v7.widget.RecyclerView
import android.view.View
import android.widget.TextView
import kotlinx.android.synthetic.main.dictionary_view_holder.view.*
import ru.nikitae57.englisher.R
import ru.nikitae57.englisher.dictionary.Entry
/**
 * ViewHolder for one dictionary row: a word TextView and a meaning TextView.
 */
class DictionaryViewHolder(itemView: View?) : RecyclerView.ViewHolder(itemView) {

    // Row widgets looked up once when the holder is created.
    private var mTvWord: TextView = itemView!!.findViewById(R.id.dict_item_tv_word)
    private var mTvMeaning: TextView = itemView!!.findViewById(R.id.dict_item_tv_meaning)

    /** Fills the row with the entry at [position] of [dictionary]. */
    fun bind(position: Int, dictionary: ArrayList<Entry>) {
        val entry = dictionary[position]
        mTvWord.text = entry.word
        mTvMeaning.text = entry.meaning
    }
}<file_sep>/app/src/main/java/ru/nikitae57/englisher/layout/dictionary/DictionaryAdapter.kt
package ru.nikitae57.englisher.layout.dictionary
import android.support.v7.widget.RecyclerView
import android.view.LayoutInflater
import android.view.ViewGroup
import ru.nikitae57.englisher.R
import ru.nikitae57.englisher.dictionary.Entry
/**
 * RecyclerView adapter that shows every [Entry] in [dictionary] using
 * [DictionaryViewHolder] rows.
 */
class DictionaryAdapter(private var dictionary: ArrayList<Entry>) : RecyclerView.Adapter<DictionaryViewHolder>() {

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): DictionaryViewHolder {
        // Inflate one row layout; the holder caches its child views.
        val rowView = LayoutInflater.from(parent.context)
            .inflate(R.layout.dictionary_view_holder, parent, false)
        return DictionaryViewHolder(rowView)
    }

    override fun getItemCount(): Int = dictionary.size

    override fun onBindViewHolder(holder: DictionaryViewHolder, position: Int) {
        // Delegate row population to the holder.
        holder.bind(position, dictionary)
    }
}<file_sep>/app/src/main/java/ru/nikitae57/englisher/layout/dictionary/DictionaryFragment.kt
package ru.nikitae57.englisher.layout.dictionary
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v7.widget.LinearLayoutManager
import android.support.v7.widget.RecyclerView
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import kotlinx.android.synthetic.main.fragment_dictionary.*
import ru.nikitae57.englisher.R
import ru.nikitae57.englisher.dictionary.Entry
/**
 * Fragment showing the dictionary as a RecyclerView list.
 *
 * Changes from the original: the onCreate override that only called super
 * was removed (redundant), the nullable RecyclerView + `!!` was replaced by
 * a non-null lookup, and the five repeated placeholder add() calls were
 * collapsed into repeat(5) — behavior is unchanged.
 */
class DictionaryFragment : Fragment() {

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
                              savedInstanceState: Bundle?): View? {
        // Inflate the fragment layout; the list is populated in onViewCreated.
        return inflater.inflate(R.layout.fragment_dictionary, container, false)
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        // The RecyclerView is part of the inflated layout, so the lookup is
        // non-null; no `!!` needed.
        val recyclerView: RecyclerView = view.findViewById(R.id.dict_rv)
        recyclerView.layoutManager = LinearLayoutManager(context)
        // Placeholder data: five identical entries, same as the original.
        val entries = ArrayList<Entry>()
        repeat(5) { entries.add(Entry("Cow", "Animal")) }
        recyclerView.adapter = DictionaryAdapter(entries)
    }
}
<file_sep>/app/src/main/java/ru/nikitae57/englisher/MainActivity.kt
package ru.nikitae57.englisher
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.view.MenuItem
import ru.nikitae57.englisher.layout.dictionary.DictionaryFragment
/**
 * Main screen: hosts the dictionary fragment and routes toolbar menu
 * selections (learn/statistics screens are still TODO).
 */
class MainActivity : AppCompatActivity() {

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        // Show the dictionary on startup.
        supportFragmentManager
            .beginTransaction()
            .add(R.id.fragment_view, DictionaryFragment())
            .commit()
    }

    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        val selectedId = item!!.itemId
        if (selectedId == R.id.action_open_dictionary) {
            // TODO make dictionary fragment
        } else if (selectedId == R.id.action_open_learn_screen) {
            // TODO make learn fragment
        } else if (selectedId == R.id.action_open_statistics) {
            // TODO make statistics fragment
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/src/main/java/ru/nikitae57/englisher/dictionary/Entry.kt
package ru.nikitae57.englisher.dictionary
data class Entry(val word: String, val meaning: String)<file_sep>/app/src/main/java/ru/nikitae57/englisher/StartMenuActivity.kt
package ru.nikitae57.englisher
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
/**
 * Start-menu screen; currently only inflates its layout, with no behavior
 * beyond the default Activity lifecycle.
 */
class StartMenuActivity : AppCompatActivity() {

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_start_menu)
    }
}
16b0ede0ab71093c57206355fdef6ca63a08556d
|
[
"Kotlin"
] | 6 |
Kotlin
|
Nikitae57/Memby
|
4ba95c4b995e7a5d48cba912e61e83d1ac629938
|
1e633966d70dc061ce93bfa5e72fae2439e8da1a
|
refs/heads/master
|
<file_sep># coding:utf-8
from flask import Flask
from flask import request
import requests
import json
import re
import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime
import configparser
# Module-level wiring: logging, Flask app, scheduler, and LINE API config.
# create logger
#logger = logging.getLogger('simple_example')
#logger.setLevel(logging.DEBUG)
logging.basicConfig( level=logging.DEBUG)

app = Flask(__name__)
# NOTE(review): BlockingScheduler blocks the thread that calls start();
# see the start-up code at the bottom of this file.
sc = BlockingScheduler()

# Credentials are read from config.ini (not checked in).
inifile = configparser.SafeConfigParser()
inifile.read("./config.ini")

# LINE trial BOT API endpoint and the channel headers every request needs.
LINEBOT_API_EVENT ='https://trialbot-api.line.me/v1/events'
LINE_HEADERS = {
    'Content-type': 'application/json; charset=UTF-8',
    'X-Line-ChannelID': inifile.get('LINE_HEADERS','Channel_ID'),
    'X-Line-ChannelSecret': inifile.get('LINE_HEADERS','Channel_Secret'),
    'X-Line-Trusted-User-With-ACL': inifile.get('LINE_HEADERS','Channel_MID')
}
def post_event(to, content):
    """POST a single event to the LINE trial BOT API.

    to      -- recipient identifier (LINE MID).
    content -- message content dict (see post_text for a text payload).
    Returns the requests.Response so callers can inspect status; the
    original assigned it to an unused local and discarded it.
    """
    msg = {
        'to': [to],
        'toChannel': 1383378250,  # Fixed value
        'eventType': "138311608800106203",  # Fixed value
        'content': content
    }
    return requests.post(LINEBOT_API_EVENT, headers=LINE_HEADERS,
                         data=json.dumps(msg))
def post_text(to, text):
    """Send a plain-text message (contentType 1, toType 1) to `to`."""
    payload = {'contentType': 1, 'toType': 1, 'text': text}
    post_event(to, payload)
@sc.scheduled_job('interval', minutes=1, id='info_job_id')
def cronjob():
    """Scheduled every minute: greet the configured recipient with the time."""
    recipient = inifile.get('test','send_to') #test:write lineID
    current = datetime.now().strftime("%p%I:%M:%S")
    post_text(recipient, "Hello info!\nおはようございます.\n現在{}です.".format(current))
@app.route('/')
def index():
    # Simple liveness page for the root URL.
    return "<h2>Hello Everyone!</h2>"
@app.route('/callback', methods=['post'])
def hellw():
    """LINE webhook: echo each incoming text message back to its sender."""
    #sc.start()
    # LINE delivers a batch of events under the 'result' key.
    msgs = request.json['result']
    #logger.debug(msgs)
    for msg in msgs:
        logging.debug("%s",msg['content']['from'])
        text = msg['content']['text']
        # Echo the text straight back to the originating user.
        post_text(msg['content']['from'],text)
    # LINE only needs a 200 response; the body is ignored.
    return ""
@app.route('/miya')
def miya():
    # Static Japanese greeting page ("Hello!").
    return "<h1>こんにちわ!</h1>"
# Bug fix: BlockingScheduler.start() blocks the calling thread forever, so
# the original top-level sc.start() meant app.run() below was never reached
# (and importing this module under a WSGI server would hang).  Run the
# scheduler in a background daemon thread so both the jobs and the Flask
# app can run.
import threading
threading.Thread(target=sc.start, daemon=True).start()

if __name__ == '__main__':
    app.run(port=8080)
<file_sep># My Line bot
## Description
Line Botアプリの習作
## Features
- 現在時刻の呼び出し
## Requirement
- Python3.4+
## Usage
(Later...)
## Installation
(Later...)
## Author
[m_miyazaki](https://github.com/titanium99)
## License
[MIT](http://b4b4r07.mit-license.org)
<file_sep>APScheduler==3.1.0
beautifulsoup4==4.4.1
Flask==0.10.1
gunicorn==19.4.5
itsdangerous==0.24
Jinja2==2.8
lxml==3.6.0
MarkupSafe==0.23
pytz==2016.4
requests==2.9.1
six==1.10.0
tinydb==3.1.3
tzlocal==1.2.2
Werkzeug==0.11.5
<file_sep># coding:utf-8
import requests
import random
import datetime
from bs4 import BeautifulSoup
from tinydb import TinyDB
from apscheduler.schedulers.blocking import BlockingScheduler
# set tinyDB: scraped rows are persisted to db.json in the 'change_table'.
db = TinyDB("db.json")
db_table = db.table('change_table')
# set apscheduler: BlockingScheduler runs the cron jobs in the foreground
# (this module is a standalone script, see the __main__ guard below).
sc = BlockingScheduler()
def crawl_site(datequery=None):
    """Fetch the HTML of PMDA's "package inserts updated within the last
    month" listing page.

    datequery -- optional value for the DATE query parameter; when None the
                 default listing page is fetched.
    Returns a dict with the raw 'html' and the fetch 'timestamp'.
    """
    base = 'http://www.info.pmda.go.jp/psearch/tenpulist.jsp'
    url = base if datequery is None else '{}?DATE={}'.format(base, datequery)
    page = requests.get(url).text
    return {'html': page,
            'timestamp': datetime.datetime.now().timestamp()}
def scraping_html(src):
    """Scrape PMDA's "package inserts updated within the last month" page:
    locate the relevant table with BeautifulSoup, turn each row into a dict,
    and store the batch in TinyDB (saved as JSON).
    """
    """ TODO (4/25): when a company name contains a space, everything after
    the space is cut off -- fix.
    """
    soup = BeautifulSoup(src['html'])
    # The data table is the sibling element after the '掲載分' heading.
    table = soup.find('h2', text='掲載分').next_sibling.next_sibling
    tr = table.find_all('tr')
    changelist = []
    # Skip the header row; each remaining row is one updated product.
    for td in tr[1:]:
        tdlist = [i for i in td.find_all('td')]
        update = {}
        # Page date (same for every row on the page).
        update['date'] = soup.find('td',class_='title').text
        # Product name: first whitespace-separated token of the first cell.
        update['seihin'] = tdlist[0].text.split()[0]
        # Company name.
        update['kigyo'] = tdlist[1].text
        # Update status column.
        update['status'] = tdlist[2].text
        update['timestamp'] = src['timestamp']
        changelist.append(update)
    db_table.insert_multiple(changelist)
# NOTE(review): the line below appears to be a redacted commented-out
# scheduler decorator (weekday/midnight variant) -- kept for reference.
#<EMAIL>('cron', day_of_week='mon-fri', hour='0', minute='26-28')
@sc.scheduled_job('cron', hour='15', minute='12')
def do_scraping():
    """Daily job (15:12): fetch the PMDA listing and persist it to TinyDB."""
    src = crawl_site()
    scraping_html(src)
@sc.scheduled_job('cron', hour='10',minute='6-8', id='greet')
def greet():
    """Scheduled job: append a greeting with the current time to test.txt."""
    stamp = datetime.datetime.now().strftime('%H:%M:%S')
    with open('test.txt','a') as log:
        log.write('good morning!{}'.format(stamp))
if __name__ == '__main__':
    # Foreground scheduler loop; blocks until interrupted (this script has
    # no web server, so blocking here is intended).
    sc.start()
<file_sep># coding:utf-8
from flask import Flask
from flask import request
import requests
import json
import re
import logging
#from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime
import pytz
import configparser
from tinydb import TinyDB, Query
from bs4 import BeautifulSoup
# Module-level wiring: logging, Flask app, config, timezone, TinyDB and the
# LINE trial BOT API headers.
# create logger
#logger = logging.getLogger('simple_example')
#logger.setLevel(logging.DEBUG)
logging.basicConfig( level=logging.DEBUG)

app = Flask(__name__)

# read initfile (credentials live in config.ini, not checked in)
inifile = configparser.SafeConfigParser()
inifile.read("./config.ini")

# setting timezone: timestamps shown to users are Tokyo time.
tz_tokyo = pytz.timezone('Asia/Tokyo')

# setting tinyDB: scraped PMDA updates are read from 'change_table'.
db = TinyDB('db.json')
db_table = db.table('change_table')

# set line api endpoint and per-channel headers.
LINEBOT_API_EVENT ='https://trialbot-api.line.me/v1/events'
LINE_HEADERS = {
    'Content-type': 'application/json; charset=UTF-8',
    'X-Line-ChannelID': inifile.get('LINE_HEADERS','Channel_ID'),
    'X-Line-ChannelSecret': inifile.get('LINE_HEADERS','Channel_Secret'),
    'X-Line-Trusted-User-With-ACL': inifile.get('LINE_HEADERS','Channel_MID')
}
def post_event( to, content):
    """POST one event to the LINE trial BOT API for recipient `to`."""
    msg = {
        'to': [to],
        'toChannel': 1383378250, # Fixed value
        'eventType': "138311608800106203", # Fixed value
        'content': content
    }
    # Response is currently ignored; failures only surface in request logs.
    r = requests.post(LINEBOT_API_EVENT, headers=LINE_HEADERS, data=json.dumps(msg))
def post_text(to, text):
    """Send a plain-text message (contentType 1, toType 1) to `to`."""
    payload = {'contentType': 1, 'toType': 1, 'text': text}
    post_event(to, payload)
def now_time(to):
    """Reply to `to` with the current time in Asia/Tokyo.

    Fix: the original also computed an `utcnow` local that was never used;
    it has been removed.
    """
    timeformat = "%p%I:%M:%S"
    tknow = datetime.now(tz_tokyo).strftime(timeformat)
    post_text(to,"こんにちわ!.\n現在{}です.".format(tknow))
def search_db(querystr, queryday=None):
    """Return the set of product names ('seihin') updated on `queryday` by a
    company whose name matches `querystr`.

    Bug fix: the original default was
    `queryday=datetime.now().strftime("%m月%d日")`, which Python evaluates
    ONCE at import time -- in a long-running server the default stayed
    frozen at the start-up date. The default is now computed per call.
    """
    if queryday is None:
        queryday = datetime.now().strftime("%m月%d日")
    qr = Query()
    qr = db_table.search(qr.date.search(queryday) &
                         qr.kigyo.search(querystr))
    res = {i['seihin'] for i in qr}
    return res
def ydn_post_text(msg):
    """Morphologically analyze `msg` with Yahoo's MAService and return the
    list of surface forms (filter '9' -- assumed to select nouns; TODO
    confirm against the Yahoo JLP API docs).
    """
    jlp_url = 'http://jlp.yahooapis.jp/MAService/V1/parse'
    params = {'appid': inifile.get('yahoo_api','appid'),
              'sentence': msg,
              'results': 'ma',
              'filter': '9'}
    # The API answers with XML; parse it with BeautifulSoup.
    resxml = requests.get(jlp_url,params=params)
    soup = BeautifulSoup(resxml.text)
    # NOTE(review): assumes the response always contains ma_result.word_list;
    # an error response would raise AttributeError here.
    rslt = [i for i in soup.ma_result.word_list]
    return [j.surface.string for j in rslt]
@app.route('/')
def index():
    # Simple liveness page for the root URL.
    return "<h2>Hello Everyone!</h2>"
@app.route('/callback', methods=['post'])
def hellw():
    """LINE webhook: route each incoming text message.

    Commands:
      "何時?" ("what time?")        -> reply with current Tokyo time.
      "更新情報:<company>"          -> look up today's PMDA updates for that
                                       company and reply with the list.
      anything else                 -> echo the text back.
    """
    msgs = request.json['result']
    #logger.debug(msgs)
    for msg in msgs:
        logging.debug("%s",msg['content']['from'])
        text = msg['content']['text']
        to = msg['content']['from']
        if text == "何時?":
            now_time(to)
        elif re.search(u"更新情報(:|:)",text):
            # Extract the company name: the token right after "情報" in the
            # morphological analysis of the message.
            morph = ydn_post_text(text)
            kigyo = morph[morph.index("情報") + 1]
            # Today's date (Tokyo) in the "MM月DD日" format stored in TinyDB.
            tkdate = datetime.now(tz_tokyo).strftime("%m月%d日")
            kousin = search_db(kigyo,tkdate)
            if len(kousin) == 0:
                # "There are no updates today."
                post_text(to, "本日の更新はないです。")
            else:
                # "Today's updates are listed below." + one product per line.
                res = "本日の更新は下記になります。\n{}".format("\n".join(kousin))
                post_text(to,res)
        else:
            # Default behavior: echo.
            post_text(to,text)
    # LINE only needs a 200 response; the body is ignored.
    return ""
@app.route('/miya')
def miya():
    # Static Japanese greeting page ("Hello!").
    return "<h1>こんにちわ!</h1>"
if __name__ == '__main__':
    # Development entry point; production runs under gunicorn instead.
    app.run(port=8080)
<file_sep>import multiprocessing
# Server Socket -- bind to localhost only; a front proxy is expected.
#bind = 'unix:/tmp/gunicorn_my_app.sock'
#backlog = 2048
bind = "127.0.0.1:8080"

# Worker Processes -- the usual 2*cores+1 heuristic; synchronous workers.
workers = multiprocessing.cpu_count() * 2 + 1
worker_class = 'sync'
worker_connections = 1000
# 0 disables worker recycling by request count.
max_requests = 0
timeout = 30
keepalive = 2
# NOTE(review): 'debug' and 'spew' were removed in newer gunicorn releases;
# confirm the pinned version (19.4.5 per requirements.txt) still accepts them.
debug = False
spew = False

# Logging -- the log directory must exist and be writable by the worker user.
logfile = '/var/log/myapp_guni/app.log'
loglevel = 'info'
logconfig = None

# Process Name shown in ps/top output.
proc_name = 'gunicorn_my_app'
|
e0cf93d61f4d4e9a41aeacba3e717f7a5cf22e1e
|
[
"Markdown",
"Python",
"Text"
] | 6 |
Python
|
titanium99/my_linebot_app
|
213dedc490543722c86d355b44465e66dd516757
|
ac6d06b7ddc92f120347f456d81203144ef89e9e
|
refs/heads/master
|
<repo_name>lndkhoa011195/eProject_OnlineAptitudeTest<file_sep>/OnlineAptitudeTest/OnlineAptitudeTest/Controllers/LoginController.cs
using OnlineAptitudeTest.Common;
using OnlineAptitudeTest.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace OnlineAptitudeTest.Controllers
{
/// <summary>
/// Handles sign-in and routes authenticated users to the area matching
/// their role (1 = Admin, 2 = Manager, 3 = Candidate), based on session
/// state kept under the UserSession keys.
/// </summary>
public class LoginController : Controller
{
    User user;
    // GET: Login
    // If a session already marks the user as logged in, skip the form and
    // redirect straight to the role's landing controller.
    public ActionResult Index()
    {
        if (Session[UserSession.ISLOGIN] != null && (bool)Session[UserSession.ISLOGIN])
        {
            if ((int)Session[UserSession.ROLEID] == 1)
                return RedirectToAction("Index", "Admin");
            if ((int)Session[UserSession.ROLEID] == 2)
                return RedirectToAction("Index", "Manager");
            if ((int)Session[UserSession.ROLEID] == 3)
                return RedirectToAction("Index", "Candidate");
        }
        return View();
    }
    // POST: Login — validates the submitted credentials via
    // LoginModel.CheckLogin, then redirects by role.  On failure the view
    // is re-rendered with a (Vietnamese) error message in ViewBag.error.
    [HttpPost]
    public ActionResult Index(LoginModel model)
    {
        if (ModelState.IsValid)
        {
            if (model.CheckLogin(model))
            {
                user = new User();
                if (user.IsAdmin())
                    return RedirectToAction("Index", "Admin");
                if (user.IsManager())
                    return RedirectToAction("Index", "Manager");
                if (user.IsCandidate())
                    return RedirectToAction("Index", "Candidate");
            }
            else
                ViewBag.error = "Tài khoản hoặc mật khẩu không đúng";
        }
        else
            ViewBag.error = "Có lỗi xảy ra trong quá trình xử lý, vui lòng thử lại sau.";
        return View();
    }
}
}<file_sep>/OnlineAptitudeTest/OnlineAptitudeTest/Models/QuestionData.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace OnlineAptitudeTest.Models
{
/// <summary>
/// Placeholder for question data access; currently has no members.
/// </summary>
public class QuestionData
{
    //test
    //hello
}
}<file_sep>/OnlineAptitudeTest/OnlineAptitudeTest/Controllers/ManagerController.cs
using OnlineAptitudeTest.Common;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace OnlineAptitudeTest.Controllers
{
/// <summary>
/// Manager area.  Every action gate-checks the session via User.IsManager()
/// and renders the shared "Error" view for anyone else.
/// </summary>
public class ManagerController : Controller
{
    User user = new User();
    // GET: Manager
    public ActionResult Index()
    {
        if (!user.IsManager())
            return View("Error");
        return View();
    }
    // Clears the session (User.Reset) and returns to the login page.
    public ActionResult Logout()
    {
        if (!user.IsManager())
            return View("Error");
        user.Reset();
        return RedirectToAction("Index", "Login");
    }
}
}
|
32a2a1c72b395a87b0258814c9a12186433a5ae1
|
[
"C#"
] | 3 |
C#
|
lndkhoa011195/eProject_OnlineAptitudeTest
|
65ed0d2acc15c3744b02d7563fbdf9d187120a5b
|
1d6e2de34ffd2232769f3cd8c1c617bb56bb4a2a
|
refs/heads/master
|
<file_sep>//
// Created by gerardo on 28/03/17.
//
#ifndef AIRWAR_NAVE_H
#define AIRWAR_NAVE_H
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
// Player ship ("nave"): screen position plus gameplay stats.
// Drawing/destruction return Allegro bitmaps (implementations not in view).
class Nave{
private:
    int X;              // horizontal screen position
    int Y;              // vertical screen position
    int Puntaje = 0;    // score
    int Level;          // current level — note: not initialised here
    int Vidas = 3;      // remaining lives
    int Velocidad = 4;  // movement speed (units per update)
public:
    Nave(int _X,int _Y);
    int getVidas(){ return Vidas;}
    int getVelocidad(){ return Velocidad;}
    int getX(){ return X;}
    int getY(){ return Y;}
    void setY(int Y){Nave::Y = Y;}
    void setX(int X){Nave::X = X;}
    void setVidas(int Vida){ Nave::Vidas = Vida;}
    void setVelocidad(int Vel){ Nave::Velocidad = Vel;}
    // Presumably loads/draws the ship sprite from image file `name` —
    // implementation not in view; confirm against Nave.cpp.
    ALLEGRO_BITMAP *Dibujar(char *name);
    // Sprite/animation used when the ship is destroyed (impl not in view).
    ALLEGRO_BITMAP *Destruir();
};
#endif //AIRWAR_NAVE_H
<file_sep>//
// Created by gerardo on 30/03/17.
//
#ifndef AIRWAR_JUGADOR_H
#define AIRWAR_JUGADOR_H
// Player profile record (name, score, deepest level, play time).
// NOTE(review): `Nombre` is a single char, which can hold only one
// letter — probably intended to be char[]/std::string; confirm.
// All members are private with no accessors, so the class is currently
// unusable from outside.
class Jugador{
private:
    char Nombre;   // player name (see note above)
    int Puntaje;   // score
    int Maxlvl;    // highest level reached
    int TiempoJ;   // time played
};
#endif //AIRWAR_JUGADOR_H
<file_sep>package com.example.juand.control11;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
/**
 * Remote-control client screen: connects a TCP socket to the game server
 * (port 8081) at the IP typed by the user, then sends one-word commands
 * ("Fire", "Left", "Right", "Down", "Up") each time the corresponding
 * button is pressed.
 *
 * NOTE(review): the socket is opened on a background thread, but the
 * command buttons become visible immediately after start() — pressing
 * one before the connection completes will NPE on `salida`; confirm
 * whether that race is acceptable.
 */
public class ConnectActivity extends AppCompatActivity {
    Button btnConect;
    ImageButton btnFire,btnLeft,btnUp,btnRight,btnDown;
    DataOutputStream salida;   // command stream to the server
    DataInputStream entrada;   // reply stream (currently unused)
    Socket cliente;
    EditText txtIP,txtPlayer;
    TextView txtNomPlay;
    String msjServer = null;   // last server reply (reader code commented out)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_connect);
        // Bind all widgets from the layout.
        btnConect=(Button)findViewById(R.id.btnConect);
        txtPlayer=(EditText)findViewById(R.id.txtPlayer);
        txtIP=(EditText)findViewById(R.id.txtIP);
        btnFire=(ImageButton)findViewById(R.id.btnFire);
        btnLeft=(ImageButton)findViewById(R.id.btnleft);
        btnUp=(ImageButton)findViewById(R.id.btnup);
        btnDown=(ImageButton)findViewById(R.id.btndown);
        btnRight=(ImageButton)findViewById(R.id.btnright);
        txtNomPlay=(TextView)findViewById(R.id.txtPlayerN);
        // "Connect": open the socket off the UI thread, then swap the UI
        // from the connection form to the control pad.
        btnConect.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Thread t = new Thread() {
                    public void run() {
                        try {
                            cliente = new Socket(txtIP.getText().toString(), 8081);
                            salida = new DataOutputStream(cliente.getOutputStream());
                            entrada = new DataInputStream(cliente.getInputStream());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                };
                t.start();
                // Show the D-pad / fire controls, hide the connect form.
                btnFire.setVisibility(View.VISIBLE);
                btnLeft.setVisibility(View.VISIBLE);
                btnUp.setVisibility(View.VISIBLE);
                btnRight.setVisibility(View.VISIBLE);
                btnDown.setVisibility(View.VISIBLE);
                txtIP.setVisibility(View.INVISIBLE);
                txtPlayer.setVisibility(View.INVISIBLE);
                btnConect.setVisibility(View.INVISIBLE);
                txtNomPlay.setText("Player: "+txtPlayer.getText());
                txtNomPlay.setVisibility(View.VISIBLE);
            }
        });
        // Each control button writes its command as a UTF string.
        // The commented-out Readerm blocks were a (disabled) attempt to
        // wait for a server acknowledgement after every command.
        btnFire.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    salida.writeUTF("Fire");
                    salida.flush();
                    //Readerm readmsj = new Readerm(entrada);
                    //readmsj.start();
                    //while (msjServer == null){
                    //    msjServer = readmsj.readMessage();
                    //}
                } catch (IOException e) {
                    e.printStackTrace();
                }
                msjServer=null;
            }
        });
        btnLeft.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    salida.writeUTF("Left");
                    salida.flush();
                    //Readerm readmsj = new Readerm(entrada);
                    //readmsj.start();
                    //while (msjServer == null){
                    //    msjServer = readmsj.readMessage();
                    //}
                } catch (IOException e) {
                    e.printStackTrace();
                }
                msjServer=null;
            }
        });
        btnRight.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    salida.writeUTF("Right");
                    salida.flush();
                    //Readerm readmsj = new Readerm(entrada);
                    //readmsj.start();
                    //while (msjServer == null){
                    //    msjServer = readmsj.readMessage();
                    //}
                } catch (IOException e) {
                    e.printStackTrace();
                }
                msjServer=null;
            }
        });
        btnDown.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    salida.writeUTF("Down");
                    salida.flush();
                    //Readerm readmsj = new Readerm(entrada);
                    //readmsj.start();
                    //while (msjServer == null){
                    //    msjServer = readmsj.readMessage();
                    //}
                } catch (IOException e) {
                    e.printStackTrace();
                }
                msjServer=null;
            }
        });
        btnUp.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    salida.writeUTF("Up");
                    salida.flush();
                    //Readerm readmsj = new Readerm(entrada);
                    //readmsj.start();
                    //while (msjServer == null){
                    //    msjServer = readmsj.readMessage();
                    //}
                } catch (IOException e) {
                    e.printStackTrace();
                }
                msjServer=null;
            }
        });
    }
}
<file_sep>//
// Created by gerardo on 29/03/17.
//
#ifndef AIRWAR_BALA_H
#define AIRWAR_BALA_H
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
// Projectile ("bala"): position plus damage/power ("Pot").
class Bala{
private:
    int BalaX;  // x position
    int BalaY;  // y position
    int Pot;    // power/damage value
public:
    Bala(int _X,int _Y,int _Pot);
    int getBalaX(){return BalaX;}
    void setBalaX(int BalaX) { Bala::BalaX = BalaX;}
    int getBalaY(){return BalaY;}
    void setBalaY(int BalaY) { Bala::BalaY = BalaY;}
    int getPot(){return Pot;}
    void setPot(int Pot) { Bala::Pot = Pot;}
    // Presumably loads/draws the bullet sprite from image file `name` —
    // implementation not in view.
    ALLEGRO_BITMAP *Dibujar(char *name);
    // Fire behaviours for player / enemy shots (name-derived; impl not in view).
    void DisparaNave();
    void DisparaEnemy();
};
#endif //AIRWAR_BALA_H
<file_sep>//
// Created by nano on 21/03/17.
//
#include <iostream>
#include <fstream>
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
#include <allegro5/allegro_font.h>
#include <allegro5/allegro_ttf.h>
#include <stdio.h>
#include <string.h>
#include <stdbool.h>
using namespace std;
ALLEGRO_DISPLAY *HomeS = NULL;
ALLEGRO_DISPLAY *AboutS = NULL;
ALLEGRO_DISPLAY *HighS = NULL;
ALLEGRO_DISPLAY *janela = NULL;
ALLEGRO_EVENT_QUEUE *evento = NULL;
ALLEGRO_BITMAP *Home = NULL;
ALLEGRO_BITMAP *Home2 = NULL;
ALLEGRO_BITMAP *Home3 = NULL;
ALLEGRO_BITMAP *Home4 = NULL;
ALLEGRO_BITMAP *About = NULL;
ALLEGRO_BITMAP *About2 = NULL;
ALLEGRO_BITMAP *High = NULL;
ALLEGRO_BITMAP *High2 = NULL;
ALLEGRO_BITMAP *Play = NULL;
ALLEGRO_FONT *fonte = NULL;
ALLEGRO_EVENT_QUEUE *fila_eventos = NULL;
ALLEGRO_MOUSE_STATE mousepos;
void AboutV();
void HomeV();
void HighV();
int PlayV();
void guardar(const char*[]);
void ordenar(char*,int);
const char * getdata();
const int LARGURA_TELA = 640;
const int ALTURA_TELA = 480;
char str[4567];
void manipular_entrada(ALLEGRO_EVENT evento);
void exibir_texto_centralizado();
int main() {
//char Nombre[50]="rata";
//int puntaje=2800;
//ordenar(Nombre,puntaje);
HomeV();
cout<<str<<endl;
return 0;
}
// Keyboard handler for the name-entry screen: appends the typed
// character to the global buffer `str` (letters, digits and space only,
// while its length is <= 25) and lets Backspace delete the last one.
void manipular_entrada(ALLEGRO_EVENT evento) {
    if (evento.type == ALLEGRO_EVENT_KEY_CHAR) {
        if (strlen(str) <= 25) {
            // `unichar` is the translated character for this key event.
            char temp[] = {evento.keyboard.unichar, '\0'};
            if (evento.keyboard.unichar == ' ') {
                strcat(str, temp);
            } else if (evento.keyboard.unichar >= '0' &&
                       evento.keyboard.unichar <= '9') {
                strcat(str, temp);
            } else if (evento.keyboard.unichar >= 'A' &&
                       evento.keyboard.unichar <= 'Z') {
                strcat(str, temp);
            } else if (evento.keyboard.unichar >= 'a' &&
                       evento.keyboard.unichar <= 'z') {
                strcat(str, temp);
            }
        }
        // Backspace removes the last character (if any).
        if (evento.keyboard.keycode == ALLEGRO_KEY_BACKSPACE && strlen(str) != 0) {
            str[strlen(str) - 1] = '\0';
        }
    }
}
// Draw the global text buffer `str` centred on the screen, but only
// when it is non-empty (Allegro text drawing with black colour).
void exibir_texto_centralizado() {
    if (strlen(str) > 0) {
        al_draw_text(fonte, al_map_rgb(0, 0, 0), LARGURA_TELA / 2,
                     (ALTURA_TELA - al_get_font_ascent(fonte)) / 2,
                     ALLEGRO_ALIGN_CENTRE, str);
    }
}
void HomeV(){
al_init();
al_init_image_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
HomeS = al_create_display(650, 480);
al_set_window_position(HomeS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(HomeS));
al_register_event_source(evento,al_get_mouse_event_source());
Home = al_load_bitmap("/home/nano/Documentos/AirWar++/images/Home/Home.png");
Home2 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/Home/Home2.png");
Home3 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/Home/Home3.png");
Home4 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/Home/Home4.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
break;
}
if (mousepos.x>264 && mousepos.x<415 && mousepos.y >224 && mousepos.y <277){
al_draw_bitmap(Home2, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN ){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
PlayV();
break;
}
}else if (mousepos.x>252 && mousepos.x<428 && mousepos.y >298 && mousepos.y <353){
al_draw_bitmap(Home3, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN ){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
AboutV();
break;
}
}else if (mousepos.x>154 && mousepos.x<522 && mousepos.y >366 && mousepos.y <421) {
al_draw_bitmap(Home4, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
HighV();
break;
}
}else if (repaint) {
al_draw_bitmap(Home, 0, 0, 0);
al_flip_display();
}
}
}
void AboutV(){
al_init();
al_init_image_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
AboutS = al_create_display(650, 480);
al_set_window_position(AboutS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(AboutS));
al_register_event_source(evento,al_get_mouse_event_source());
About = al_load_bitmap("/home/nano/Documentos/AirWar++/images/About/About.png");
About2 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/About/About2.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_bitmap(About);
al_destroy_bitmap(About2);
break;
}
if (mousepos.x>12 && mousepos.x<94 && mousepos.y >422 && mousepos.y <462) {
al_draw_bitmap(About2, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(AboutS);
al_destroy_bitmap(About);
al_destroy_bitmap(About2);
HomeV();
break;
}
}else if (repaint) {
al_draw_bitmap(About, 0, 0, 0);
al_flip_display();
}
}
}
void HighV(){
al_init();
al_init_image_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
HighS = al_create_display(650, 480);
al_set_window_position(HighS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(HighS));
al_register_event_source(evento,al_get_mouse_event_source());
High = al_load_bitmap("/home/nano/Documentos/AirWar++/images/High Scores/High.png");
High2 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/High Scores/High2.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_bitmap(High);
al_destroy_bitmap(High2);
break;
}
if (mousepos.x>12 && mousepos.x<94 && mousepos.y >422 && mousepos.y <462) {
al_draw_bitmap(High2, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(HighS);
al_destroy_bitmap(High);
al_destroy_bitmap(High2);
HomeV();
break;
}
}else if (repaint) {
al_draw_bitmap(High, 0, 0, 0);
al_flip_display();
}
}
}
int PlayV(){
al_init();
al_init_image_addon();
al_init_font_addon();
al_init_ttf_addon();
al_install_keyboard();
al_install_mouse();
janela = al_create_display(LARGURA_TELA, ALTURA_TELA);
al_set_window_position(janela,350,150);
al_set_window_title(janela, "Entrada de Texto");
fila_eventos = al_create_event_queue();
bool sair = false;
bool concluido = false;
strcpy(str, "");
Play = al_load_bitmap("High.png");
High2 = al_load_bitmap("/home/nano/Documentos/AirWar++/images/High Scores/High2.png");
fonte = al_load_font("Alice.ttf", 42, 0);
al_register_event_source(fila_eventos, al_get_display_event_source(janela));
al_register_event_source(fila_eventos, al_get_keyboard_event_source());
while (!sair) {
while (!al_is_event_queue_empty(fila_eventos)) {
ALLEGRO_EVENT evento;
al_wait_for_event(fila_eventos, &evento);
al_get_mouse_state(&mousepos);
if (!concluido) {
manipular_entrada(evento);
if (evento.type == ALLEGRO_EVENT_KEY_DOWN && evento.keyboard.keycode == ALLEGRO_KEY_ENTER) {
concluido = true;
}
}
if (evento.type == ALLEGRO_EVENT_DISPLAY_CLOSE) {
sair = true;
}
}
al_draw_bitmap(Play, 0, 0, 0);
if (!concluido) {
al_draw_text(fonte, al_map_rgb(0,0,0), LARGURA_TELA / 2,
(ALTURA_TELA / 2 - al_get_font_ascent(fonte)) / 2,
ALLEGRO_ALIGN_CENTRE, "NICKNAME:");
}
exibir_texto_centralizado();
al_flip_display();
}
al_destroy_bitmap(Play);
al_destroy_font(fonte);
al_destroy_event_queue(fila_eventos);
al_destroy_display(janela);
}
// Persist the high-score table to "Datos.txt".
// `Data` holds 10 entries as alternating name/score C strings
// (name0, score0, name1, score1, ...); one "name score" pair per line.
// Fixes: checks that the file opened (the old version wrote blindly)
// and uses '\n' instead of endl to avoid a flush per line.
void guardar(const char *Data[]){
    std::ofstream out("Datos.txt", std::ios::trunc);
    if (!out) return;  // cannot persist scores; stay silent like before
    for (int i = 0; i < 10; i += 2) {
        out << Data[i] << " " << Data[i + 1] << '\n';
    }
}
void ordenar(char* nombre,int puntaje){
FILE *miarchivo;
char * archivo="Datos.txt";
const char *Data[10];
char nprimero[80],sprim[80],nsegundo[80],sseg[80],ntercero[80],sterc[80],ncuarto[80],scuar[80],nquinto[80],squi[80];
miarchivo=fopen(archivo,"r");
fscanf(miarchivo,"%s",&nprimero);
fscanf(miarchivo,"%s",&sprim);
int num=atoi(sprim);
fscanf(miarchivo,"%s",&nsegundo);
fscanf(miarchivo,"%s",&sseg);
int num2=atoi(sseg);
fscanf(miarchivo,"%s",&ntercero);
fscanf(miarchivo,"%s",&sterc);
int num3=atoi(sterc);
fscanf(miarchivo,"%s",&ncuarto);
fscanf(miarchivo,"%s",&scuar);
int num4=atoi(scuar);
fscanf(miarchivo,"%s",&nquinto);
fscanf(miarchivo,"%s",&squi);
int num5=atoi(squi);
if (puntaje>num5 && puntaje<num4){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=ncuarto;
Data[7]=scuar;
Data[8]=nombre;
Data[9]=str.c_str();
guardar(Data);
}else if (puntaje>num4 && puntaje<num3){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=nombre;
Data[7]=str.c_str();
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num3 && puntaje<num2){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=nombre;
Data[5]=str.c_str();
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num2 && puntaje<num){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nombre;
Data[3]=str.c_str();
Data[4]=nsegundo;
Data[5]=sseg;
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num){
string str=to_string(puntaje);
Data[0]=nombre;
Data[1]=str.c_str();
Data[2]=nprimero;
Data[3]=sprim;
Data[4]=nsegundo;
Data[5]=sseg;
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else{
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=ncuarto;
Data[7]=scuar;
Data[8]=nquinto;
Data[9]=squi;
guardar(Data);
}
}
// Return the first whitespace-delimited token of "Datos.txt" (the top
// scorer's name), or "" if the file cannot be read.  The result points
// into an internal static buffer, so it remains valid after return.
//
// Bug fixes vs. the old version, which:
//   * fscanf'd "%s" into the elements of `const char *Data[10]` — i.e.
//     wrote string bytes over an array of *pointers*: undefined
//     behaviour / stack corruption;
//   * never checked fopen() for NULL and leaked the FILE handle;
//   * returned `*Data`, a pointer into a local array (dangling).
const char * getdata(){
    static char first[80];
    first[0] = '\0';
    FILE *miarchivo = fopen("Datos.txt", "r");
    if (miarchivo == NULL)
        return first;
    if (fscanf(miarchivo, "%79s", first) != 1)  // width bound prevents overflow
        first[0] = '\0';
    fclose(miarchivo);
    return first;
}
// Created by gerardo on 21/03/17.
//
#include <iostream>
#include <cstdlib>
#include <ctime>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <cstring>
#include <unistd.h>
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
#include <allegro5/allegro_primitives.h>
#include <allegro5/allegro_audio.h>
#include <allegro5/allegro_acodec.h>
#include <allegro5/allegro_font.h>
#include <allegro5/allegro_ttf.h>
#include <fstream>
#include <stdio.h>
#include <string.h>
#include <stdbool.h>
#include "Objects/Nave.h"
#include "Objects/Objects.h"
#define BUFSIZE 1000
#define PORT 8088
using namespace std;
//_____________________________________
//-------Variables Globales-----------
const int NumBalas=10;
const int NumEnemigos=7;
int Y =-2586;
int Kills=0;
int Puntaje=0;
char PlayerName[40]="JDesq";
int PowerUpsBalaConseguidos=0;
int PowerUpsEscudoConseguidos=0;
int n;
ALLEGRO_DISPLAY *display = NULL;
ALLEGRO_EVENT_QUEUE *evento = NULL;
ALLEGRO_TIMER *timer = NULL;
ALLEGRO_BITMAP *Fondo = NULL;
ALLEGRO_BITMAP *BalaIMG = NULL;
ALLEGRO_BITMAP *Enemy1 = NULL;
ALLEGRO_BITMAP *PSprite = NULL;
ALLEGRO_FONT *Vidas = NULL;
ALLEGRO_THREAD *HiloServer = NULL;
ALLEGRO_SAMPLE *Disparo = NULL;
ALLEGRO_SAMPLE *ExplotaE = NULL;
ALLEGRO_SAMPLE *DeadNave = NULL;
ALLEGRO_BITMAP *PowerUpBalaIMG = NULL; //NUevo
ALLEGRO_BITMAP *PowerUpEscudoIMG = NULL; //NUevo
ALLEGRO_DISPLAY *HomeS = NULL;
ALLEGRO_DISPLAY *AboutS = NULL;
ALLEGRO_DISPLAY *HighS = NULL;
ALLEGRO_DISPLAY *janela = NULL;
ALLEGRO_BITMAP *Home = NULL;
ALLEGRO_BITMAP *Home2 = NULL;
ALLEGRO_BITMAP *Home3 = NULL;
ALLEGRO_BITMAP *Home4 = NULL;
ALLEGRO_BITMAP *About = NULL;
ALLEGRO_BITMAP *About2 = NULL;
ALLEGRO_BITMAP *High = NULL;
ALLEGRO_BITMAP *High2 = NULL;
ALLEGRO_BITMAP *Play = NULL;
ALLEGRO_FONT *fonte = NULL;
ALLEGRO_MOUSE_STATE mousepos;
enum GAME_KEYS
{
KEY_LEFT,
KEY_RIGHT,
KEY_UP,
KEY_DOWN,
KEY_ESC,
KEY_SPACE
};
int key[] = { 0, 0, 0, 0, 0, 0};
void AboutV();
void HomeV();
void HighV();
void PlayV();
void JuegoV();
void guardar(const char*[]);
void ordenar(char*,int);
void getdata();
const int LARGURA_TELA = 640;
const int ALTURA_TELA = 480;
char str[4567];
void manipular_entrada(ALLEGRO_EVENT evento);
void exibir_texto_centralizado();
void InitBalas(Proyectil Bala[], int Tamanho);
void Tirabalas(Proyectil Bala[], int Tamanho, Nave Player);
void ActualizaBalas(Proyectil Bala[], int Tamanho);
void DibujaBalas(Proyectil Bala[],PowerUpBala PowerBala[], int Tamanho);
void BalaChoca(Proyectil Bala[], int B_Tamanho, Enemigo Enemy[],PowerUpBala PowerBala[],PowerUpEscudo PowerEscudo[], int E_Tamanho,Nave Player);
void InitBalasE(Proyectil BalaE[], int Tamanho);
void TirabalasE(Proyectil BalaE[], int Tamanho, Enemigo Enemy[],Nave Player);
void ActualizaBalasE(Proyectil BalaE[], int Tamanho,Enemigo Enemy[],Nave Player);
void DibujaBalasE(Proyectil BalaE[], int Tamanho);
int BalaChocaE(Proyectil BalaE[],PowerUpEscudo PowerEscudo[], int B_Tamanho,Nave Player);
void InitEnemigos(Enemigo Enemy[], int tamanho);
void LiberaEnemigos(Enemigo Enemy[], int tamanho);
void ActualizaEnemigos(Enemigo Enemy[], int tamanho);
void DibujaEnemigos(Enemigo Enemy[], int tamanho);
int EnemigoChoca(Enemigo Enemy[],PowerUpEscudo PowerEscudo[], int tamanho, Nave Player);
void InitPowerUpBala(PowerUpBala PowerBala[], int Tamanho); //nuevo
void LiberaPowerUpBala(PowerUpBala PowerBala[], int tamanho);
void ActualizaPowerUpBala(PowerUpBala PowerBala[], int tamanho);
void DibujaPowerUpBala(PowerUpBala PowerBala[], int tamanho);
int PowerUpBalaChoca(PowerUpBala PowerBala[],Proyectil Bala[], int tamanho, Nave Player);
void InitPowerUpEscudo(PowerUpEscudo PowerEscudo[], int Tamanho); //nuevo
void LiberaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho);
void ActualizaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho);
void DibujaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho);
int PowerUpEscudoChoca(PowerUpEscudo PowerEscudo[],Proyectil Bala[], int tamanho, Nave Player);
// Axis-aligned bounding-box overlap test.
// Each box is given as (x, y, width, height); returns 1 when the two
// rectangles intersect, 0 otherwise.  The "- 1" keeps the test
// inclusive of the last covered pixel, so boxes sharing only an edge
// pixel still collide.
int Colision(int b1_x, int b1_y, int b1_w, int b1_h, int b2_x, int b2_y, int b2_w, int b2_h) {
    const bool separated =
        b1_x > b2_x + b2_w - 1 ||   // box 1 entirely to the right of box 2
        b1_y > b2_y + b2_h - 1 ||   // box 1 entirely below box 2
        b2_x > b1_x + b1_w - 1 ||   // box 2 entirely to the right of box 1
        b2_y > b1_y + b1_h - 1;     // box 2 entirely below box 1
    return separated ? 0 : 1;
}
void *Escucha(ALLEGRO_THREAD *thr, void *args){
char buffer[BUFSIZE];
// define our address structure, stores our port
// and our ip address, and the socket type, etc..
struct sockaddr_in addrinfo;
addrinfo.sin_family = AF_INET;
addrinfo.sin_port = htons(PORT);
addrinfo.sin_addr.s_addr = INADDR_ANY;
// creacion del socket.
int sock;
if ((sock = socket(addrinfo.sin_family, SOCK_STREAM, 0)) < 0) {
cout << "Error in creating the socket.";
}
// iniciar el socket server
if (bind(sock, (struct sockaddr *) &addrinfo, sizeof(addrinfo)) != 0) {
cout << "Adress Bussy.";
}
// tener el server socket en escucha de conexiones
if (listen(sock, 5) != 0) {
cout << "Error in opening listener.";
}
char *msg = (char *) "success! You are connected.\r\n";
while(1){
cout << "Waiting for connections...." << endl;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
if (int cliente =
accept(sock, (struct sockaddr *) &client_addr, &sin_size)) {
cout << "recieved new connection from " << inet_ntoa(client_addr.sin_addr) << endl;
while (1) {
n = read(cliente,buffer,255 );
int num = recv(cliente, buffer, BUFSIZE, 0);
if (num < 1) break;
buffer[num] = '\0';
if (buffer[num - 1] == '\n')
buffer[num - 1] = '\0';
cout << buffer << endl;
strcpy(buffer, "");
if (n < 0)
{
perror("ERROR writing to socket");
}else if(n==6){
cout<<"Dispara"<<endl;
}else if(n==5){
cout<<"Arriba"<<endl;
}else if(n==7){
cout<<"Abajo"<<endl;
}else if(n==9){
cout<<"Derecha"<<endl;
}else if(n==11){
cout<<"Izquierda"<<endl;
}
}
} else {
cout << "Error in accepting new connection." << endl;
}
}
}
int main(){
HomeV();
return 0;
}
//__________________________________________________
//-------------Ventanas-----------------------------
//__________________________________________________
void manipular_entrada(ALLEGRO_EVENT evento) {
if (evento.type == ALLEGRO_EVENT_KEY_CHAR) {
if (strlen(str) <= 25) {
char temp[] = {evento.keyboard.unichar, '\0'};
if (evento.keyboard.unichar == ' ') {
strcat(str, temp);
} else if (evento.keyboard.unichar >= '0' &&
evento.keyboard.unichar <= '9') {
strcat(str, temp);
} else if (evento.keyboard.unichar >= 'A' &&
evento.keyboard.unichar <= 'Z') {
strcat(str, temp);
} else if (evento.keyboard.unichar >= 'a' &&
evento.keyboard.unichar <= 'z') {
strcat(str, temp);
}
}
if (evento.keyboard.keycode == ALLEGRO_KEY_BACKSPACE && strlen(str) != 0) {
str[strlen(str) - 1] = '\0';
}
}
}
void exibir_texto_centralizado() {
if (strlen(str) > 0) {
al_draw_text(fonte, al_map_rgb(0, 0, 0), LARGURA_TELA / 2,
(ALTURA_TELA - al_get_font_ascent(fonte)) / 2,
ALLEGRO_ALIGN_CENTRE, str);
}
}
void guardar(const char *Data[]){
ofstream Guardar;
Guardar.open("Datos.txt",ios::trunc);
for(int i=0;i<10;i+=2){
Guardar<<Data[i]<<" "<<Data[i+1]<<endl;
}
}
void ordenar(char* nombre,int puntaje){
FILE *miarchivo;
char * archivo="Datos.txt";
const char *Data[10];
char nprimero[80],sprim[80],nsegundo[80],sseg[80],ntercero[80],sterc[80],ncuarto[80],scuar[80],nquinto[80],squi[80];
miarchivo=fopen(archivo,"r");
fscanf(miarchivo,"%s",&nprimero);
fscanf(miarchivo,"%s",&sprim);
int num=atoi(sprim);
fscanf(miarchivo,"%s",&nsegundo);
fscanf(miarchivo,"%s",&sseg);
int num2=atoi(sseg);
fscanf(miarchivo,"%s",&ntercero);
fscanf(miarchivo,"%s",&sterc);
int num3=atoi(sterc);
fscanf(miarchivo,"%s",&ncuarto);
fscanf(miarchivo,"%s",&scuar);
int num4=atoi(scuar);
fscanf(miarchivo,"%s",&nquinto);
fscanf(miarchivo,"%s",&squi);
int num5=atoi(squi);
if (puntaje>num5 && puntaje<num4){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=ncuarto;
Data[7]=scuar;
Data[8]=nombre;
Data[9]=str.c_str();
guardar(Data);
}else if (puntaje>num4 && puntaje<num3){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=nombre;
Data[7]=str.c_str();
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num3 && puntaje<num2){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=nombre;
Data[5]=str.c_str();
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num2 && puntaje<num){
string str=to_string(puntaje);
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nombre;
Data[3]=str.c_str();
Data[4]=nsegundo;
Data[5]=sseg;
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else if (puntaje>num){
string str=to_string(puntaje);
Data[0]=nombre;
Data[1]=str.c_str();
Data[2]=nprimero;
Data[3]=sprim;
Data[4]=nsegundo;
Data[5]=sseg;
Data[6]=ntercero;
Data[7]=sterc;
Data[8]=ncuarto;
Data[9]=scuar;
guardar(Data);
}else{
Data[0]=nprimero;
Data[1]=sprim;
Data[2]=nsegundo;
Data[3]=sseg;
Data[4]=ntercero;
Data[5]=sterc;
Data[6]=ncuarto;
Data[7]=scuar;
Data[8]=nquinto;
Data[9]=squi;
guardar(Data);
}
}
void getdata(){
FILE *miarchivo;
char * archivo="Datos.txt";
char nprimero[80],sprim[80],nsegundo[80],sseg[80],ntercero[80],sterc[80],ncuarto[80],scuar[80],nquinto[80],squi[80];
miarchivo=fopen(archivo,"r");
fscanf(miarchivo,"%s",&nprimero);
fscanf(miarchivo,"%s",&sprim);
al_draw_text(fonte,al_map_rgb(0, 0, 0),150, 100,ALLEGRO_ALIGN_CENTRE,nprimero);
al_draw_text(fonte,al_map_rgb(0, 0, 0),450, 100,ALLEGRO_ALIGN_CENTRE,sprim);
fscanf(miarchivo,"%s",&nsegundo);
fscanf(miarchivo,"%s",&sseg);
al_draw_text(fonte,al_map_rgb(0, 0, 0),150, 150,ALLEGRO_ALIGN_CENTRE,nsegundo);
al_draw_text(fonte,al_map_rgb(0, 0, 0),450, 150,ALLEGRO_ALIGN_CENTRE,sseg);
fscanf(miarchivo,"%s",&ntercero);
fscanf(miarchivo,"%s",&sterc);
al_draw_text(fonte,al_map_rgb(0, 0, 0),150, 200,ALLEGRO_ALIGN_CENTRE,ntercero);
al_draw_text(fonte,al_map_rgb(0, 0, 0),450, 200,ALLEGRO_ALIGN_CENTRE,sterc);
fscanf(miarchivo,"%s",&ncuarto);
fscanf(miarchivo,"%s",&scuar);
al_draw_text(fonte,al_map_rgb(0, 0, 0),150, 250,ALLEGRO_ALIGN_CENTRE,ncuarto);
al_draw_text(fonte,al_map_rgb(0, 0, 0),450, 250,ALLEGRO_ALIGN_CENTRE,scuar);
fscanf(miarchivo,"%s",&nquinto);
fscanf(miarchivo,"%s",&squi);
al_draw_text(fonte,al_map_rgb(0, 0, 0),150, 300,ALLEGRO_ALIGN_CENTRE,nquinto);
al_draw_text(fonte,al_map_rgb(0, 0, 0),450, 300,ALLEGRO_ALIGN_CENTRE,squi);
}
void HomeV(){
al_init();
al_init_image_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
HomeS = al_create_display(650, 480);
al_set_window_position(HomeS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(HomeS));
al_register_event_source(evento,al_get_mouse_event_source());
Home = al_load_bitmap("images/Home/Home.png");
Home2 = al_load_bitmap("images/Home/Home2.png");
Home3 = al_load_bitmap("images/Home/Home3.png");
Home4 = al_load_bitmap("images/Home/Home4.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
break;
}
if (mousepos.x>264 && mousepos.x<415 && mousepos.y >224 && mousepos.y <277){
al_draw_bitmap(Home2, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN ){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
PlayV();
break;
}
}else if (mousepos.x>252 && mousepos.x<428 && mousepos.y >298 && mousepos.y <353){
al_draw_bitmap(Home3, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN ){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
AboutV();
break;
}
}else if (mousepos.x>154 && mousepos.x<522 && mousepos.y >366 && mousepos.y <421) {
al_draw_bitmap(Home4, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(HomeS);
al_destroy_bitmap(Home);
al_destroy_bitmap(Home2);
al_destroy_bitmap(Home3);
al_destroy_bitmap(Home4);
HighV();
break;
}
}else if (repaint) {
al_draw_bitmap(Home, 0, 0, 0);
al_flip_display();
}
}
}
void AboutV(){
al_init();
al_init_image_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
AboutS = al_create_display(650, 480);
al_set_window_position(AboutS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(AboutS));
al_register_event_source(evento,al_get_mouse_event_source());
About = al_load_bitmap("images/About/About.png");
About2 = al_load_bitmap("images/About/About2.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_bitmap(About);
al_destroy_bitmap(About2);
Salir=false;
}
if (mousepos.x>12 && mousepos.x<94 && mousepos.y >422 && mousepos.y <462) {
al_draw_bitmap(About2, 0, 0, 0);
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(AboutS);
al_destroy_bitmap(About);
al_destroy_bitmap(About2);
HomeV();
Salir=false;
}
}else if (repaint) {
al_draw_bitmap(About, 0, 0, 0);
al_flip_display();
}
}
}
// High-scores screen. Draws the table background, paints a white panel
// and lets getdata() (defined elsewhere in this file) print the stored
// scores onto it. The back button (x in (12,94), y in (422,462)) returns
// to the home screen; closing the window exits the loop.
// Uses the file-scope globals HighS / evento / fonte / High / High2.
//
// NOTE(review): `fonte` is loaded here but never destroyed (leak), and on
// the ALLEGRO_EVENT_DISPLAY_CLOSE path the display `HighS` is leaked as
// well — TODO release both. Also, after the close branch destroys the
// bitmaps, the hover branch of the same iteration can still draw them
// (use-after-free risk) — confirm and chain with `else if`.
void HighV(){
al_init();
al_init_font_addon();
al_init_ttf_addon();
al_init_image_addon();
al_init_primitives_addon();
al_install_mouse();
int repaint=1;
bool Salir =true;
HighS = al_create_display(650, 480);
al_set_window_position(HighS,350,150);
evento = al_create_event_queue();
al_register_event_source(evento, al_get_display_event_source(HighS));
al_register_event_source(evento,al_get_mouse_event_source());
fonte = al_load_font("Fonts/Alice.ttf", 42, 0);
High = al_load_bitmap("images/High Scores/High.png");
High2 = al_load_bitmap("images/High Scores/High2.png");
while (Salir){
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE){
al_destroy_bitmap(High);
al_destroy_bitmap(High2);
Salir=false;
}
// Back button hover: draw highlighted background plus the score panel.
if (mousepos.x>12 && mousepos.x<94 && mousepos.y >422 && mousepos.y <462) {
al_draw_bitmap(High2, 0, 0, 0);
al_draw_filled_rectangle(100,90,520,350,al_map_rgb(255,255,255));
getdata();
al_flip_display();
if (event.type==ALLEGRO_EVENT_MOUSE_BUTTON_DOWN){
al_destroy_display(HighS);
al_destroy_bitmap(High);
al_destroy_bitmap(High2);
HomeV();
Salir=false;
}
}else if (repaint) {
al_draw_bitmap(High, 0, 0, 0);
al_draw_filled_rectangle(100,90,520,350,al_map_rgb(255,255,255));
getdata();
al_flip_display();
}
}
}
void PlayV(){
al_init();
al_init_image_addon();
al_init_font_addon();
al_init_ttf_addon();
al_install_keyboard();
al_install_mouse();
janela = al_create_display(LARGURA_TELA, ALTURA_TELA);
al_set_window_position(janela,350,150);
al_set_window_title(janela, "Entrada de Texto");
evento = al_create_event_queue();
int repaint=1;
bool sair = false;
bool concluido = false;
strcpy(str, "");
Play = al_load_bitmap("images/Play/High.jpeg");
High2 = al_load_bitmap("images/High Scores/High2.png");
fonte = al_load_font("Fonts/Alice.ttf", 42, 0);
al_register_event_source(evento, al_get_display_event_source(janela));
al_register_event_source(evento, al_get_keyboard_event_source());
while (!sair) {
while(!al_is_event_queue_empty(evento)) {
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
al_get_mouse_state(&mousepos);
if (!concluido) {
manipular_entrada(event);
if (event.type == ALLEGRO_EVENT_KEY_DOWN && event.keyboard.keycode == ALLEGRO_KEY_ENTER) {
concluido = true;
}
}
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE) {
sair = true;
}
if (mousepos.x>12 && mousepos.x<94 && mousepos.y >422 && mousepos.y <462) {
al_draw_bitmap(High2, 0, 0, 0);
al_flip_display();
if (event.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN) {
al_destroy_display(AboutS);
al_destroy_bitmap(About);
al_destroy_bitmap(About2);
HomeV();
sair = true;
}
}
}
if (concluido){
al_destroy_bitmap(Play);
al_destroy_bitmap(High2);
JuegoV();
sair=true;
}
if (repaint) {
al_draw_bitmap(Play, 0, 0, 0);
exibir_texto_centralizado();
al_flip_display();
}
}
al_destroy_bitmap(Play);
al_destroy_font(fonte);
al_destroy_event_queue(evento);
}
// Main gameplay screen: creates the game window, spawns the network
// listener thread (Escucha, defined elsewhere — it appears to drive the
// global `n` with remote-control command codes; the loop below reacts to
// n == 5/6/7/9/11 the same way as the arrow/space keys — TODO confirm the
// exact protocol), then runs a 60 FPS timer-driven loop that moves the
// player, fires/updates bullets and power-ups, spawns enemies and renders
// the frame. Exits when the player runs out of lives, presses ESC, or
// closes the window.
void JuegoV(){
al_init();
al_init_image_addon();
al_install_keyboard();
al_init_primitives_addon();
al_install_audio();
al_init_acodec_addon();
al_reserve_samples(5);
al_install_mouse();
al_init_font_addon();
al_init_ttf_addon();
// The name-entry window is replaced by the game window created below.
al_destroy_display(janela);
bool Salir =true;
int repaint = 1;
//------------Game objects-----------------
Nave Player = Nave(300,380);
Proyectil BalaNAVE[NumBalas];
PowerUpBala PowerBala [NumEnemigos]; // new
PowerUpEscudo PowerEscudo [NumEnemigos]; // new
Proyectil BalaEnemigo[10];
Enemigo Enemy[NumEnemigos];
//------------Asset loading-----------------
Fondo = al_load_bitmap("images/Texture.png");
BalaIMG = al_load_bitmap("images/Bala.png");
PowerUpBalaIMG = al_load_bitmap("images/power1.png");// new
PowerUpEscudoIMG = al_load_bitmap("images/power2.png");// new
Disparo = al_load_sample("Sounds/Shot.wav");
ExplotaE = al_load_sample("Sounds/ExplodeE.WAV");
Vidas = al_load_font("Fonts/arial.ttf",12,0);
DeadNave = al_load_sample("Sounds/DeadN.wav");
PSprite = Player.Dibujar("images/Nave2.png");
display = al_create_display(650,480);
al_set_window_position(display,350,150);
evento = al_create_event_queue();
timer = al_create_timer(1.0 / 60);
al_register_event_source(evento, al_get_mouse_event_source());
al_register_event_source(evento, al_get_display_event_source(display));
al_register_event_source(evento, al_get_timer_event_source(timer));
al_register_event_source(evento, al_get_keyboard_event_source());
//Threads: background listener for remote-control commands.
srand(time(NULL));
HiloServer = al_create_thread(Escucha,NULL);
al_start_thread(HiloServer);
InitBalas(BalaNAVE,NumBalas);
InitPowerUpBala(PowerBala,NumEnemigos);// new
InitPowerUpEscudo(PowerEscudo,NumEnemigos);// new
InitBalasE(BalaEnemigo,NumEnemigos);
InitEnemigos(Enemy,NumEnemigos);
al_start_timer(timer);
while (!key[KEY_ESC] && Salir) {
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE)
Salir = false;
// Key-down: latch movement flags; SPACE (or remote code 6) fires.
if (event.type == ALLEGRO_EVENT_KEY_DOWN) {
if (event.keyboard.keycode == ALLEGRO_KEY_LEFT)
key[KEY_LEFT] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_RIGHT)
key[KEY_RIGHT] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_UP)
key[KEY_UP] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_DOWN)
key[KEY_DOWN] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_SPACE || n == 6) {
key[KEY_SPACE] = 1;
Tirabalas(BalaNAVE, NumBalas, Player);
}
if (event.keyboard.keycode == ALLEGRO_KEY_ESCAPE)
key[KEY_ESC] = 1;
}
if (event.type == ALLEGRO_EVENT_KEY_UP) {
if (event.keyboard.keycode == ALLEGRO_KEY_LEFT)
key[KEY_LEFT] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_RIGHT)
key[KEY_RIGHT] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_UP)
key[KEY_UP] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_DOWN)
key[KEY_DOWN] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_ESCAPE)
key[KEY_ESC] = 0;
}
// One simulation step per timer tick (60 Hz). The sprite swaps below pick
// the shielded variant whenever PowerEscudo->paraUsar is set; note this
// reads element 0 only — the flag is used as a pool-wide "shield on"
// marker throughout this file.
if (event.type == ALLEGRO_EVENT_TIMER) {
if ((key[KEY_LEFT] && Player.getX() >= 0) || (n == 11 && Player.getX() >= 0)) {
Player.setX(Player.getX() - Player.getVelocidad());
Player.setOR(1);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo1.png");
}else{
PSprite = Player.Dibujar("images/Nave1.png");
}
}
if ((key[KEY_RIGHT] && Player.getX() <= 575) || (n == 9 && Player.getX() <= 575)) {
Player.setX(Player.getX() + Player.getVelocidad());
Player.setOR(3);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo3.png");
}else{
PSprite = Player.Dibujar("images/Nave3.png");
}
}
if ((key[KEY_UP] && Player.getY() >= 0) || (n == 5 && Player.getY() >= 0)) {
Player.setY(Player.getY() - Player.getVelocidad());
Player.setOR(2);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo2.png");
}else{
PSprite = Player.Dibujar("images/Nave2.png");
}
}
if ((key[KEY_DOWN] && Player.getY() <= 415) || (n == 7 && Player.getY() <= 415)) {
Player.setY(Player.getY() + Player.getVelocidad());
Player.setOR(4);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo4.png");
}else{
PSprite = Player.Dibujar("images/Nave4.png");
}
}
// Diagonal orientations (5..8) override the sprite chosen above.
if(key[KEY_UP] && key[KEY_LEFT]) {
Player.setOR(5);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo5.png");
}else{
PSprite = Player.Dibujar("images/Nave5.png");
}
}
if(key[KEY_UP] && key[KEY_RIGHT]) {
Player.setOR(6);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo6.png");
}else{
PSprite = Player.Dibujar("images/Nave6.png");
}
}
if(key[KEY_DOWN] && key[KEY_RIGHT]){
Player.setOR(8);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo8.png");
}else{
PSprite = Player.Dibujar("images/Nave8.png");
}
}
if(key[KEY_DOWN] && key[KEY_LEFT]) {
Player.setOR(7);
if(PowerEscudo->paraUsar==true){
PSprite = Player.Dibujar("images/NaveEscudo7.png");
}else{
PSprite = Player.Dibujar("images/Nave7.png");
}
}
if (key[KEY_SPACE]) {
ActualizaBalas(BalaNAVE, NumBalas);
}
// Enemy AI, collisions, power-ups and spawning for this tick.
TirabalasE(BalaEnemigo,NumEnemigos,Enemy,Player);
ActualizaBalasE(BalaEnemigo,NumEnemigos,Enemy,Player);
Player.setSalud(BalaChocaE(BalaEnemigo,PowerEscudo,NumEnemigos,Player));
PowerUpBalaChoca(PowerBala,BalaNAVE, NumEnemigos, Player);
PowerUpEscudoChoca(PowerEscudo,BalaNAVE, NumEnemigos, Player);
BalaChoca(BalaNAVE, NumBalas, Enemy,PowerBala,PowerEscudo, NumEnemigos,Player);
Player.setVidas(EnemigoChoca(Enemy,PowerEscudo, NumEnemigos, Player));
LiberaEnemigos(Enemy, NumEnemigos);
ActualizaEnemigos(Enemy, NumEnemigos);
LiberaPowerUpBala(PowerBala, NumEnemigos); // new
ActualizaPowerUpBala(PowerBala, NumEnemigos); // new
LiberaPowerUpEscudo(PowerEscudo, NumEnemigos); // new
ActualizaPowerUpEscudo(PowerEscudo, NumEnemigos); // new
// Bonus life every 10 kills (Kills is a file-scope counter).
if (Kills == 10) {
Player.setVidas(Player.getVidas() + 1);
cout << "Ganaste una Vida!"<<endl;
Kills=0;
}
repaint = 1;
}
if (key[KEY_ESC])
Salir = false;
// Out of lives: record the score and leave the loop.
if(Player.getVidas() == 0){
ordenar(str,Puntaje);
cout << "-----------------------------------------" << endl;
cout << endl << "Moriste"<< endl;
cout << "Tu puntaje es: " << Puntaje << endl;
break;
}
// Health exhausted: lose a life, show the explosion sprite, reset health.
// NOTE(review): the respawn below only triggers with probability 1/100
// per frame (rand()%100==0) — looks like an accidental condition; the
// ship keeps the explosion sprite until it fires. TODO confirm intent.
if(Player.getSalud() <= 0){
Player.setVidas(Player.getVidas()-1);
cout << "Te quedan: " << Player.getVidas() << " vidas" << endl;
PSprite = Player.Dibujar("images/Boom.png");
Player.setSalud(100);
if (rand()%100==0){
Player.setX(300);
Player.setY(380);
PSprite = Player.Dibujar("images/Nave2.png");
}
}
// Render: scroll the background (Y walks from -2586 up to -544, then
// wraps), then draw sprites and the HUD strip along the top edge.
if (repaint){
al_clear_to_color(al_map_rgb(0,0,0));
if (repaint){al_draw_bitmap(Fondo,0,Y,0); Y+=2; if (Y>=-544) Y=-2586;}
al_draw_bitmap(PSprite,Player.getX(),Player.getY(),0);
DibujaBalas(BalaNAVE,PowerBala,NumBalas);
DibujaBalasE(BalaEnemigo,NumEnemigos);
DibujaEnemigos(Enemy,NumEnemigos);
DibujaPowerUpBala(PowerBala,NumEnemigos);// new
DibujaPowerUpEscudo(PowerEscudo,NumEnemigos);// new
al_draw_filled_rectangle(0,0,650,20,al_map_rgb(0,0,0));
al_draw_textf(Vidas, al_map_rgb(255,255,255),30,4,ALLEGRO_ALIGN_CENTRE,"Vidas: %i", Player.getVidas());
al_draw_textf(Vidas, al_map_rgb(255,255,255),120,4,ALLEGRO_ALIGN_CENTRE,"Salud: %i", Player.getSalud());
al_draw_text(Vidas, al_map_rgb(255,255,255),300,4,ALLEGRO_ALIGN_CENTRE,str);
al_draw_textf(Vidas, al_map_rgb(255,255,255),580,4,ALLEGRO_ALIGN_CENTRE,"Puntaje: %i", Puntaje);
al_flip_display();
repaint = 0;
}
}
// Tear down everything this screen created.
al_destroy_display(display);
al_destroy_event_queue(evento);
al_destroy_timer(timer);
al_destroy_bitmap(Fondo);
al_destroy_bitmap(BalaIMG);
al_destroy_bitmap(Enemy1);
al_destroy_bitmap(PSprite);
al_destroy_bitmap(PowerUpBalaIMG);
al_destroy_bitmap(PowerUpEscudoIMG);
al_destroy_font(Vidas);
al_destroy_sample(Disparo);
al_destroy_sample(ExplotaE);
al_destroy_sample(DeadNave);
}
//_________________________________________________
//---------Func. Balas-------------
// Puts every bullet of the player's pool into its idle/default state
// (speed 7, 5x5 hit-box, inactive).
void InitBalas(Proyectil Bala[], int Tamanho){
    for (int idx = 0; idx < Tamanho; ++idx) {
        Proyectil& bala = Bala[idx];
        bala.velocidad = 7;
        bala.bordeX = 5;
        bala.bordeY = 5;
        bala.activo = false;
    }
}
// Fires one shot: takes the first free slot in the bullet pool, plays the
// shot sample and launches the bullet from the ship's muzzle offset
// (+36,+32), travelling in the ship's current orientation.
void Tirabalas(Proyectil Bala[], int Tamanho, Nave Player){
    for (int slot = 0; slot < Tamanho; ++slot) {
        if (Bala[slot].activo)
            continue;               // slot busy, keep looking
        al_play_sample(Disparo, 1.0, 0.0,1.0,ALLEGRO_PLAYMODE_ONCE,NULL);
        Bala[slot].x = Player.getX()+36;
        Bala[slot].y = Player.getY()+32;
        Bala[slot].dir = Player.getOR();
        Bala[slot].activo = true;
        break;                      // at most one bullet per call
    }
}
// Advances every live player bullet one step along its direction code and
// retires bullets that leave the playfield. Direction codes match
// Nave::getOR(): 1=left 2=up 3=right 4=down, 5..8 = the four diagonals.
// The off-screen thresholds are kept exactly as before (note the left
// bound is -15 for straight-left but -10 for the diagonals).
void ActualizaBalas(Proyectil Bala[], int Tamanho){
    for (int idx = 0; idx < Tamanho; ++idx) {
        Proyectil& b = Bala[idx];
        if (!b.activo)
            continue;
        switch (b.dir) {
        case 1:
            b.x -= b.velocidad;
            if (b.x < -15) b.activo = false;
            break;
        case 2:
            b.y -= b.velocidad;
            if (b.y < -30) b.activo = false;
            break;
        case 3:
            b.x += b.velocidad;
            if (b.x > 680) b.activo = false;
            break;
        case 4:
            b.y += b.velocidad;
            if (b.y > 490) b.activo = false;
            break;
        case 5:
            b.x -= b.velocidad;
            b.y -= b.velocidad;
            if (b.y < -30 || b.x < -10) b.activo = false;
            break;
        case 6:
            b.x += b.velocidad;
            b.y -= b.velocidad;
            if (b.y < -30 || b.x > 680) b.activo = false;
            break;
        case 7:
            b.x -= b.velocidad;
            b.y += b.velocidad;
            if (b.y > 490 || b.x < -10) b.activo = false;
            break;
        case 8:
            b.x += b.velocidad;
            b.y += b.velocidad;
            if (b.y > 490 || b.x > 680) b.activo = false;
            break;
        }
    }
}
// Renders every live player bullet: a big red circle while the bullet
// power-up is flagged, otherwise a small black one.
// NOTE(review): indexes PowerBala with the bullet index, so it assumes
// the power-up pool is at least as large as `Tamanho` — confirm at the
// call site (it is called with NumBalas).
void DibujaBalas(Proyectil Bala[],PowerUpBala PowerBala[], int Tamanho){
    for (int idx = 0; idx < Tamanho; ++idx) {
        if (!Bala[idx].activo)
            continue;
        const bool potenciada = (PowerBala[idx].paraUsar == true);
        if (potenciada)
            al_draw_filled_circle(Bala[idx].x, Bala[idx].y, 8, al_map_rgb(216,30,5));
        else
            al_draw_filled_circle(Bala[idx].x, Bala[idx].y, 4, al_map_rgb(0,0,0));
    }
}
// Collision pass for the player's bullets against (a) enemies, (b) falling
// bullet power-ups and (c) falling shield power-ups. An enemy hit scores
// 100 points, counts a kill, and clears the pool-wide "powered bullet"
// flag; a power-up hit consumes the pickup and arms the matching flag.
// NOTE(review): `PowerBala->paraUsar` / `PowerEscudo->paraUsar` read and
// write element 0 only — the flag is used as a pool-wide marker across
// this file; confirm that is intended.
void BalaChoca(Proyectil Bala[], int B_Tamanho, Enemigo Enemy[],PowerUpBala PowerBala[],PowerUpEscudo PowerEscudo[], int E_Tamanho,Nave Player){
for(int i=0;i<B_Tamanho;i++){
for(int j=0;j<E_Tamanho;j++){
if(Colision(Bala[i].x,Bala[i].y,Bala[i].bordeX,Bala[i].bordeY,Enemy[j].x,Enemy[j].y,Enemy[j].bordeX,Enemy[j].bordeY) && (Bala[i].activo && Enemy[j].activo)){
al_play_sample(ExplotaE, 1.0, 0.0,1.0,ALLEGRO_PLAYMODE_ONCE,NULL);
Bala[i].activo=false;
Enemy[j].activo= false;
Puntaje += 100;
Kills++;
// Hitting an enemy consumes the powered-bullet effect.
PowerBala->paraUsar=false;
}else if(Colision(Bala[i].x,Bala[i].y,Bala[i].bordeX,Bala[i].bordeY,PowerBala[j].x,PowerBala[j].y,PowerBala[j].bordeX,PowerBala[j].bordeY) && (Bala[i].activo && PowerBala[j].activo)){
Bala[i].activo=false; // new
PowerUpsBalaConseguidos++;
cout << "Has conseguido " << PowerUpsBalaConseguidos << " power ups bala" << endl; // new
PowerBala[j].activo= false; // new
PowerBala->paraUsar=true;
if (PowerBala[i].paraUsar==true){ // new
al_draw_filled_circle(Bala[i].x,Bala[i].y,8,al_map_rgb(216,30,5));
}else{
al_draw_filled_circle(Bala[i].x,Bala[i].y,4,al_map_rgb(0,0,0));
}
}else if(Colision(Bala[i].x,Bala[i].y,Bala[i].bordeX,Bala[i].bordeY,PowerEscudo[j].x,PowerEscudo[j].y,PowerEscudo[j].bordeX,PowerEscudo[j].bordeY) && (Bala[i].activo && PowerEscudo[j].activo)){
Bala[i].activo=false; // new
PowerUpsEscudoConseguidos++;
cout << "Has conseguido " << PowerUpsEscudoConseguidos << " power ups escudo" << endl; // new
PowerEscudo[j].activo= false; // new
PowerEscudo->paraUsar=true;
}
}
}
}
//---------Func. PowerUpBala-------------
// Resets the bullet power-up pool: every pickup inactive and not in effect.
void InitPowerUpBala(PowerUpBala PowerBala[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        PowerUpBala& p = PowerBala[idx];
        p.velocidad = 3;
        p.bordeX = 50;
        p.bordeY = 50;
        p.activo = false;
        p.paraUsar = false;
    }
}
// Randomly spawns (1-in-8000 chance per free slot per call) one bullet
// power-up at a random x along the top edge; at most one per call.
// The rand() call order matches the original exactly.
void LiberaPowerUpBala(PowerUpBala PowerBala[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        if (PowerBala[idx].activo)
            continue;
        if ((rand() % 8000) != 0)
            continue;
        PowerBala[idx].x = 60 + rand() % (650 - 120);
        PowerBala[idx].y = 0;
        PowerBala[idx].activo = true;
        break;
    }
}
// Scrolls every active bullet power-up down the screen and retires the
// ones that fall past y = 450.
void ActualizaPowerUpBala(PowerUpBala PowerBala[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        if (!PowerBala[idx].activo)
            continue;
        PowerBala[idx].y += PowerBala[idx].velocidad;
        if (PowerBala[idx].y > 450)
            PowerBala[idx].activo = false;
    }
}
// Draws the bullet power-up sprite for every pickup currently falling.
void DibujaPowerUpBala(PowerUpBala PowerBala[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx)
        if (PowerBala[idx].activo)
            al_draw_bitmap(PowerUpBalaIMG, PowerBala[idx].x, PowerBala[idx].y, 0);
}
// Checks the falling bullet power-ups against the player's ship. A touched
// pickup is consumed and arms the "powered bullet" flag on the pool.
//
// Fix: the function is declared to return int but had no return statement
// — flowing off the end of a value-returning function is undefined
// behaviour. It now returns the number of pickups collected during this
// call; existing callers that ignore the result are unaffected.
int PowerUpBalaChoca(PowerUpBala PowerBala[],Proyectil Bala[], int tamanho, Nave Player){
    int recogidos = 0;   // pickups collected this call
    for(int i=0; i< tamanho; i++) {
        if (PowerBala[i].activo) {
            if (Colision(PowerBala[i].x, PowerBala[i].y, PowerBala[i].bordeX, PowerBala[i].bordeY, Player.getX() + 10, Player.getY() + 10, Player.getBordeX(), Player.getBordeY())) {
                PowerUpsBalaConseguidos++; // new
                cout << "Has conseguido " << PowerUpsBalaConseguidos << " power ups bala" << endl;
                PowerBala[i].activo = false;
                // NOTE(review): flags element 0 only — used pool-wide as the
                // "powered bullet" marker across this file; confirm intended.
                PowerBala->paraUsar=true;
                recogidos++;
                if (PowerBala[i].paraUsar==true){ // new
                    al_draw_filled_circle(Bala[i].x,Bala[i].y,8,al_map_rgb(216,30,5));
                }else{
                    al_draw_filled_circle(Bala[i].x,Bala[i].y,4,al_map_rgb(0,0,0));
                }
            }
        }
    }
    return recogidos;
}
//---------Func. PowerUpEscudo-------------
// Resets the shield power-up pool: every pickup inactive and not in effect.
void InitPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        PowerUpEscudo& p = PowerEscudo[idx];
        p.velocidad = 3;
        p.bordeX = 50;
        p.bordeY = 50;
        p.activo = false;
        p.paraUsar = false;
    }
}
// Randomly spawns (1-in-8000 chance per free slot per call) one shield
// power-up at a random x along the top edge; at most one per call.
// The rand() call order matches the original exactly.
void LiberaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        if (PowerEscudo[idx].activo)
            continue;
        if ((rand() % 8000) != 0)
            continue;
        PowerEscudo[idx].x = 60 + rand() % (650 - 120);
        PowerEscudo[idx].y = 0;
        PowerEscudo[idx].activo = true;
        break;
    }
}
// Scrolls every active shield power-up down the screen and retires the
// ones that fall past y = 450.
void ActualizaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        if (!PowerEscudo[idx].activo)
            continue;
        PowerEscudo[idx].y += PowerEscudo[idx].velocidad;
        if (PowerEscudo[idx].y > 450)
            PowerEscudo[idx].activo = false;
    }
}
// Draws the shield power-up sprite for every pickup currently falling.
void DibujaPowerUpEscudo(PowerUpEscudo PowerEscudo[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx)
        if (PowerEscudo[idx].activo)
            al_draw_bitmap(PowerUpEscudoIMG, PowerEscudo[idx].x, PowerEscudo[idx].y, 0);
}
// Checks the falling shield power-ups against the player's ship. A touched
// pickup is consumed and arms the shield flag on the pool.
//
// Fix: the function is declared to return int but had no return statement
// — undefined behaviour. It now returns the number of pickups collected
// during this call; existing callers that ignore the result are unaffected.
int PowerUpEscudoChoca(PowerUpEscudo PowerEscudo[],Proyectil Bala[], int tamanho, Nave Player){
    int recogidos = 0;   // pickups collected this call
    for(int i=0; i< tamanho; i++) {
        if (PowerEscudo[i].activo) {
            if (Colision(PowerEscudo[i].x, PowerEscudo[i].y, PowerEscudo[i].bordeX, PowerEscudo[i].bordeY, Player.getX() + 10, Player.getY() + 10, Player.getBordeX(), Player.getBordeY())) {
                PowerUpsEscudoConseguidos++; // new
                cout << "Has conseguido " << PowerUpsEscudoConseguidos << " power ups escudo" << endl;
                PowerEscudo[i].activo = false;
                // NOTE(review): flags element 0 only — used pool-wide as the
                // "shield on" marker across this file; confirm intended.
                PowerEscudo->paraUsar=true;
                recogidos++;
            }
        }
    }
    return recogidos;
}
//--------Func. BalaE.........................................
// Puts every bullet of the enemy pool into its idle/default state
// (speed 6, 5x5 hit-box, inactive).
void InitBalasE(Proyectil BalaE[], int Tamanho){
    for (int idx = 0; idx < Tamanho; ++idx) {
        Proyectil& bala = BalaE[idx];
        bala.velocidad = 6;
        bala.bordeX = 5;
        bala.bordeY = 5;
        bala.activo = false;
    }
}
// Lets every active shooter enemy fire at random (1-in-1000 chance per
// free bullet slot per call). The bullet starts at the enemy's muzzle
// offset (+25,+30), inherits the enemy's damage, and records the player's
// current position as its target (used by the homing turret types).
// The rand() call order matches the original exactly (short-circuit:
// rand() is only drawn for inactive slots).
void TirabalasE(Proyectil BalaE[], int Tamanho, Enemigo Enemy[],Nave Player){
    for (int e = 0; e < Tamanho; ++e) {
        if (!(Enemy[e].activo && Enemy[e].shot))
            continue;
        for (int s = 0; s < Tamanho; ++s) {
            if (!BalaE[s].activo && (rand() % 1000 == 0)) {
                BalaE[s].x = Enemy[e].x + 25;
                BalaE[s].potencia = Enemy[e].potencia;
                BalaE[s].destX = Player.getX();
                BalaE[s].destY = Player.getY();
                BalaE[s].y = Enemy[e].y + 30;
                BalaE[s].activo = true;
                break;   // one bullet per enemy per call
            }
        }
    }
}
// Moves every active enemy bullet. Bullets considered while scanning the
// turret types (3 and 4) step diagonally towards the coordinates captured
// at fire time; otherwise they fall straight down. Off-screen bullets are
// retired.
//
// Fix: all four out-of-bounds deactivations in the homing branch used
// index `i` (the outer enemy index) instead of `j` (the bullet index), so
// unrelated bullet slots were switched off while the escaping bullet could
// stay active forever.
//
// NOTE(review): the outer loop runs the full update once per enemy index,
// so every bullet advances `Tamanho` times per call — looks unintended but
// is preserved here; only the index bug is fixed.
void ActualizaBalasE(Proyectil BalaE[], int Tamanho, Enemigo Enemy[],Nave Player){
    for(int i=0;i< Tamanho;i++) {
        for(int j=0; j < Tamanho;j++){
            if (BalaE[j].activo) {
                if (Enemy[i].type == 3 || Enemy[i].type==4){
                    // Homing: step towards (destX, destY), quadrant by quadrant.
                    if(BalaE[j].destX > BalaE[j].x && BalaE[j].destY > BalaE[j].y){
                        BalaE[j].x += BalaE[j].velocidad;
                        BalaE[j].y += BalaE[j].velocidad;
                        if (BalaE[j].x > 690 || BalaE[j].y > 500)
                            BalaE[j].activo = false;
                    }
                    else if(BalaE[j].destX < BalaE[j].x && BalaE[j].destY > BalaE[j].y){
                        BalaE[j].x -= BalaE[j].velocidad;
                        BalaE[j].y += BalaE[j].velocidad;
                        if (BalaE[j].x < -10 || BalaE[j].y > 500)
                            BalaE[j].activo = false;
                    }
                    else if(BalaE[j].destX < BalaE[j].x && BalaE[j].destY < BalaE[j].y){
                        BalaE[j].x -= BalaE[j].velocidad;
                        BalaE[j].y -= BalaE[j].velocidad;
                        if (BalaE[j].x < -10 || BalaE[j].y < -10)
                            BalaE[j].activo = false;
                    }
                    else if(BalaE[j].destX > BalaE[j].x && BalaE[j].destY < BalaE[j].y){
                        BalaE[j].x += BalaE[j].velocidad;
                        BalaE[j].y -= BalaE[j].velocidad;
                        if (BalaE[j].x > 690 || BalaE[j].y < -10)
                            BalaE[j].activo = false;
                    }
                }
                else {
                    // Straight shot: fall down and retire below the screen.
                    BalaE[j].y += BalaE[j].velocidad;
                    if (BalaE[j].y > 500)
                        BalaE[j].activo = false;
                }
            }
        }
    }
}
// Draws every active enemy bullet as a small black circle.
void DibujaBalasE(Proyectil BalaE[], int Tamanho){
    for (int idx = 0; idx < Tamanho; ++idx)
        if (BalaE[idx].activo)
            al_draw_filled_circle(BalaE[idx].x, BalaE[idx].y, 4, al_map_rgb(0,0,0));
}
// Collision pass for enemy bullets against the player. If the pool-wide
// shield flag is set the bullet is absorbed; otherwise the player takes
// the bullet's damage and the hit sound plays. Also retires homing
// bullets that reached their captured target point.
// `Player` is passed BY VALUE: setSalud() mutates the local copy, and the
// caller re-applies the result via the returned health value.
// NOTE(review): `PowerEscudo->paraUsar` reads/writes element 0 only (the
// pool-wide shield marker), and it is cleared after EVERY hit — i.e. the
// shield is consumed by a single absorbed bullet; confirm intended.
int BalaChocaE(Proyectil BalaE[],PowerUpEscudo PowerEscudo[], int B_Tamanho,Nave Player){
for(int i=0; i < B_Tamanho;i++){
if(Colision(BalaE[i].x,BalaE[i].y,BalaE[i].bordeX,BalaE[i].bordeY,Player.getX(),Player.getY(),Player.getBordeX(),Player.getBordeY()) && BalaE[i].activo){
if(PowerEscudo->paraUsar==true){
cout<<"Tienes escudo activado"<<endl;
BalaE[i].activo=false;
}else{
Player.setSalud(Player.getSalud()-BalaE[i].potencia);
al_play_sample(DeadNave, 1.0, 0.0,1.0,ALLEGRO_PLAYMODE_ONCE,NULL);
BalaE[i].activo=false;
}
PowerEscudo->paraUsar=false;
}
// Homing bullet reached its captured destination: retire it.
if(Colision(BalaE[i].x,BalaE[i].y,BalaE[i].bordeX,BalaE[i].bordeY,BalaE[i].destX,BalaE[i].destY,BalaE[i].destX+10,BalaE[i].destY+10)){
BalaE[i].activo=false;
}
}
return Player.getSalud();
}
//---------Func. Enemigos--------------------------------------
// Marks every slot of the enemy pool as free.
void InitEnemigos(Enemigo Enemy[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx)
        Enemy[idx].activo = false;
}
// Enemy spawner: for each free pool slot there is a 1-in-1000 chance per
// call to spawn one enemy (at most one per call). The spawned type is
// drawn uniformly from 1..5 and each type has its own stat table:
//   1 jet (fast, weak)   2 bomber (slow, strong)
//   3 gun tower (static) 4 missile tower (static, homing shots)
//   5 transport (does not shoot)
void LiberaEnemigos(Enemigo Enemy[], int tamanho){
for(int i=0;i< tamanho;i++) {
if (!Enemy[i].activo) {
if ((rand() % 1000) == 0) {
Enemy[i].type = rand()%5+1;
// Jets
if(Enemy[i].type == 1){
Enemy[i].velocidad=4;
Enemy[i].bordeX=50;
Enemy[i].bordeY=50;
Enemy[i].potencia = 10;
Enemy[i].salud = 10;
Enemy[i].x = 60+ rand() % (650-120);
Enemy[i].y = 0;
Enemy[i].shot=true;
Enemy[i].activo = true;
break;
}
// Bomber
else if(Enemy[i].type == 2) {
Enemy[i].velocidad = 2;
Enemy[i].bordeX = 50;
Enemy[i].bordeY = 50;
Enemy[i].potencia = 30;
Enemy[i].salud = 20;
Enemy[i].x = 60 + rand() % (650 - 120);
Enemy[i].y = 0;
Enemy[i].shot=true;
Enemy[i].activo = true;
break;
}
// Towers (static; spawned at a random position on screen)
else if(Enemy[i].type == 3){
Enemy[i].velocidad=0;
Enemy[i].bordeX=75;
Enemy[i].bordeY=75;
Enemy[i].potencia = 20;
Enemy[i].salud = 20;
Enemy[i].x = 60+ rand() % (650-120);
Enemy[i].y = 20+ rand() % (480-40);
Enemy[i].activo = true;
break;
}
// Missile towers
else if(Enemy[i].type == 4){
Enemy[i].velocidad=0;
Enemy[i].potencia = 30;
Enemy[i].bordeX=75;
Enemy[i].bordeY=75;
Enemy[i].salud = 20;
Enemy[i].x = 60+ rand() % (650-120);
Enemy[i].y = 20+ rand() % (480-120);
Enemy[i].shot=true;
Enemy[i].activo = true;
break;
}
// Transport (harmless, never fires)
else if(Enemy[i].type == 5){
Enemy[i].velocidad=2;
Enemy[i].bordeX=100;
Enemy[i].bordeY=75;
Enemy[i].potencia = 0;
Enemy[i].salud = 20;
Enemy[i].x = 60+ rand() % (650-120);
Enemy[i].y = 0;
Enemy[i].shot= false;
Enemy[i].activo = true;
break;
}
}
}
}
}
// Moves every active enemy down by its own speed (towers have speed 0)
// and retires the ones that leave the bottom of the screen.
void ActualizaEnemigos(Enemigo Enemy[], int tamanho){
    for (int idx = 0; idx < tamanho; ++idx) {
        if (!Enemy[idx].activo)
            continue;
        Enemy[idx].y += Enemy[idx].velocidad;
        if (Enemy[idx].y > 450)
            Enemy[idx].activo = false;
    }
}
// Draws every active enemy, choosing the sprite file (and a vertical flip
// for the airborne types 1 and 5) from the enemy's type code. The sprite
// is kept in the file-scope global `Enemy1`, which JuegoV destroys at
// shutdown.
//
// Fix: the previous revision called al_load_bitmap() for every enemy on
// every frame without ever releasing the old bitmap — a steady memory
// leak at 60 FPS. The previously-loaded sprite is now destroyed before a
// new one is loaded (al_destroy_bitmap() is documented as a no-op on
// NULL, and a file-scope pointer is zero-initialized).
// TODO(perf): loading from disk per enemy per frame is still costly —
// consider caching one bitmap per type.
void DibujaEnemigos(Enemigo Enemy[], int tamanho){
    for (int i = 0; i < tamanho; i++) {
        if (!Enemy[i].activo)
            continue;
        const char* ruta = NULL;
        int flags = 0;
        switch (Enemy[i].type) {
        case 1: ruta = "images/Enemy3.png"; flags = ALLEGRO_FLIP_VERTICAL; break; // jet
        case 2: ruta = "images/Enemy1.png"; break;                                // bomber
        case 3: ruta = "images/Tower2.png"; break;                                // gun tower
        case 4: ruta = "images/Tower1.png"; break;                                // missile tower
        case 5: ruta = "images/Enemy5.png"; flags = ALLEGRO_FLIP_VERTICAL; break; // transport
        }
        if (ruta != NULL) {
            al_destroy_bitmap(Enemy1);        // release last sprite (NULL-safe)
            Enemy1 = al_load_bitmap(ruta);
            al_draw_bitmap(Enemy1, Enemy[i].x, Enemy[i].y, flags);
        }
    }
}
// Collision pass for enemy bodies against the player's ship. If the
// pool-wide shield flag is set the enemy is destroyed harmlessly;
// otherwise the player loses one life and the hit sound plays. Either
// way the enemy is removed and the shield flag is cleared (a single ram
// consumes the shield).
// `Player` is passed BY VALUE: setVidas() mutates the local copy, and the
// caller re-applies the result via the returned life count.
// NOTE(review): `PowerEscudo->paraUsar` reads/writes element 0 only — the
// flag is used as a pool-wide marker across this file; confirm intended.
int EnemigoChoca(Enemigo Enemy[],PowerUpEscudo PowerEscudo[], int tamanho, Nave Player){
for(int i=0; i< tamanho; i++) {
if (Enemy[i].activo) {
if (Colision(Enemy[i].x, Enemy[i].y, Enemy[i].bordeX, Enemy[i].bordeY, Player.getX() + 10, Player.getY() + 10, Player.getBordeX(), Player.getBordeY())) {
if(PowerEscudo->paraUsar==true){
cout<<"Tienes escudo activado"<<endl;
Enemy[i].activo = false;
}else{
Player.setVidas(Player.getVidas() - 1);
cout << "Te quedan: " << Player.getVidas() << " vidas" << endl;
al_play_sample(DeadNave,1.0, 0.0,1.0,ALLEGRO_PLAYMODE_ONCE,NULL);
Enemy[i].activo = false;
}
PowerEscudo->paraUsar=false;
}
}
}
return Player.getVidas();
}<file_sep>//
// Created by gerardo on 30/03/17.
//
#ifndef AIRWAR_ENEMIGO_H
#define AIRWAR_ENEMIGO_H
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
// Early object-oriented sketch of an enemy: unused private fields and an
// empty public interface.
// NOTE(review): a plain struct named Enemigo also exists in Objects.h;
// including both headers in one translation unit would clash — consider
// deleting this header if it is unused.
class Enemigo{
private:
int X;      // screen position, x
int Y;      // screen position, y
int Tipo;   // enemy type code
public:
};
#endif //AIRWAR_ENEMIGO_H
<file_sep>#include <iostream>
#include<string.h>
#include<sys/types.h>
#include<sys/socket.h>
#include<netinet/in.h>
#include<arpa/inet.h>
#include<stdlib.h>
#include<unistd.h>
using namespace std;
// Minimal TCP echo/listener used to remote-control the game: binds to
// port 8081, accepts a single client, then reads messages in a loop and
// prints them.
//
// Fixes:
//  - `bufsize` is now const: `char buffer[bufsize]` with a non-const
//    bound is a VLA, which is not standard C++;
//  - the strncpy length `strlen(buffer)-1` underflowed to SIZE_MAX when
//    the buffer was empty (e.g. read() returning 0 on client disconnect),
//    smashing the stack; it is now guarded.
//
// NOTE(review): the n==6 / n==7 branches compare the NUMBER OF BYTES read,
// not the message content — presumably the protocol encodes commands by
// message length; confirm against the client.
int main()
{
    int client, server;
    int portnum = 8081;
    bool isExit = false;
    const int bufsize = 256;           // fix: must be const (no VLAs in C++)
    char buffer[bufsize];
    char contentBuffer[bufsize-1];
    struct sockaddr_in server_addr,client_addr;
    socklen_t size;
    int n;
    // Create the listening socket.
    server = socket(AF_INET, SOCK_STREAM, 0);
    if(server < 0){
        cout << "Error Estableciendo la conexion" << endl;
    }
    cout <<"server Socket conexion creada" << endl;
    bzero((char *) &server_addr, sizeof(server_addr));
    server_addr.sin_family = AF_INET;
    server_addr.sin_addr.s_addr = INADDR_ANY;
    server_addr.sin_port = htons(portnum);
    // Bind to the port.
    if(bind(server,(struct sockaddr*)&server_addr,sizeof(server_addr)) < 0){
        cout << "Error Biding Socket" << endl;
        exit(1);
    }
    cout << "buscando clientes" << endl;
    listen(server,1);   // listen for one pending connection
    size= sizeof(client_addr);
    // Accept a single client; all further reads come from it.
    client = accept(server, (struct sockaddr*)&client_addr, &size);
    if(client< 0){
        cout<< "Error al Aceptar" << endl;
        exit(1);
    }
    while(server >0){
        //strcpy(buffer,"server conectado---\n");
        //send(server,buffer,bufsize,0);
        cout <<"conectado con el cliente" << endl;
        cout <<"Esperando mensaje de cliente... "<<endl;
        bzero(buffer,256);
        bzero(contentBuffer,255);
        n = read(client,buffer,255 );
        if (n < 0)
        {
            perror("ERROR reading from socket");
            return(1);
        }
        // fix: guard the length — an empty buffer used to underflow to SIZE_MAX
        size_t len = strlen(buffer);
        if (len > 0)
            strncpy(contentBuffer,buffer,len - 1);
        cout<<"Mensaje: "<<endl;
        cout<<buffer;
        if (n < 0)
        {
            perror("ERROR writing to socket");
            return(1);
        }else if(n==6){
            cout<<"Avion hacia la izquierda"<<endl;
        }else if(n==7){
            cout<<"Avion hacia la derecha"<<endl;
        }
    }
    close(server);   // NOTE(review): unreachable — the loop only exits via return
    return 0;
}<file_sep>//
// Created by gerardo on 29/03/17.
//
#include "Bala.h"
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
// Creates a bullet at screen position (_X, _Y) carrying damage `_Pot`.
Bala::Bala(int _X, int _Y, int _Pot) {
BalaX = _X;
BalaY = _Y;
Pot = _Pot;
}
// Advances the bullet 8 pixels upwards (the player fires towards the top
// of the screen).
void Bala::DisparaNave(){
BalaY -= 8;
}
// Loads and returns the bullet sprite at `name`. Ownership passes to the
// caller, who must eventually al_destroy_bitmap() it.
ALLEGRO_BITMAP* Bala::Dibujar(char *name){
    return al_load_bitmap(name);
}<file_sep>//
// Created by gerardo on 09/04/17.
//
#ifndef AIRWAR_OBJECTS_H
#define AIRWAR_OBJECTS_H
// One bullet (player or enemy) in a fixed-size pool; inactive entries are
// reused instead of allocated.
struct Proyectil{
int x;           // screen position
int y;
int dir;         // travel direction code (player bullets): matches Nave::getOR()
int destX;       // captured target position (homing enemy bullets)
int destY;
int potencia;    // damage dealt on hit
int bordeX;      // collision box size
int bordeY;
int velocidad;   // pixels moved per update
bool activo;     // false = free pool slot
};
// One enemy in a fixed-size pool. `type` is 1..5 (jet, bomber, gun tower,
// missile tower, transport) and selects the stats in LiberaEnemigos().
struct Enemigo{
int type;        // enemy kind, 1..5
int x;           // screen position
int y;
int potencia;    // damage its bullets deal
int velocidad;   // pixels moved per update (0 for towers)
int salud;       // hit points
int bordeX;      // collision box size
int bordeY;
bool activo;     // false = free pool slot
bool shot;       // whether this enemy fires bullets
};
// A falling "powered bullet" pickup. Mirrors the Proyectil layout; the
// extra `paraUsar` flag marks the effect as currently active (the code
// reads/writes it on element 0 of the pool as a pool-wide marker).
struct PowerUpBala{
int x;           // screen position
int y;
int dir;         // unused for pickups (kept for layout parity)
int destX;
int destY;
int potencia;
int bordeX;      // collision box size
int bordeY;
int velocidad;   // fall speed, pixels per update
bool activo;     // pickup currently falling
bool paraUsar;   // effect armed / in use
};
// A falling shield pickup. Mirrors the Proyectil layout; the extra
// `paraUsar` flag marks the shield as currently active (the code
// reads/writes it on element 0 of the pool as a pool-wide marker).
struct PowerUpEscudo{
int x;           // screen position
int y;
int dir;         // unused for pickups (kept for layout parity)
int destX;
int destY;
int potencia;
int bordeX;      // collision box size
int bordeY;
int velocidad;   // fall speed, pixels per update
bool activo;     // pickup currently falling
bool paraUsar;   // shield armed / in use
};
#endif //AIRWAR_OBJECTS_H
<file_sep>//
// Created by gerardo on 21/03/17.
//
#include <iostream>
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
#include <pthread.h>
#include "Objects/Nave.h"
#include "Objects/Bala.h"
using namespace std;
// Sprite paths for the prototype below.
// NOTE(review): absolute developer-machine paths, and binding a string
// literal to non-const char* is deprecated (ill-formed since C++11) —
// consider const char* and relative paths.
char *NaveName = "/home/gerardo/CLionProjects/AirWar++/images/Nave.png";
char *BalaName = "/home/gerardo/CLionProjects/AirWar++/images/Bala.png";
// Background scroll offset; walks from -1400 up to 0, then wraps.
int Y =-1400;
// Indices into the `key` latch array below.
enum GAME_KEYS
{
KEY_LEFT,
KEY_RIGHT,
KEY_UP,
KEY_DOWN,
KEY_P,
KEY_SPACE
};
int key[] = { 0, 0, 0, 0, 0, 0};   // 1 while the matching key is held
ALLEGRO_DISPLAY *display;
ALLEGRO_EVENT_QUEUE *evento;
ALLEGRO_TIMER *timer;
ALLEGRO_BITMAP *Fondo;             // scrolling background bitmap
int main(){
int repaint = 1;
int repaintbala = 0;
al_init();
al_init_image_addon();
al_install_keyboard();
//Objetos
Nave Player = Nave(50,380);
Nave Player2 = Nave(50,0);
for(int i= )
Bala BalaNave(Player.getX()+45,Player.getY()+15,2);
Fondo = al_load_bitmap("/home/gerardo/CLionProjects/AirWar++/images/Textura.jpg");
display = al_create_display(650,480);
evento = al_create_event_queue();
timer = al_create_timer(1.0 / 60);
al_register_event_source(evento, al_get_display_event_source(display));
al_register_event_source(evento, al_get_timer_event_source(timer));
al_register_event_source(evento, al_get_keyboard_event_source());
al_start_timer(timer);
while (1)
{
ALLEGRO_EVENT event;
al_wait_for_event(evento, &event);
if (event.type == ALLEGRO_EVENT_DISPLAY_CLOSE)
break;
if (event.type == ALLEGRO_EVENT_KEY_DOWN)
{
if (event.keyboard.keycode == ALLEGRO_KEY_LEFT)
key[KEY_LEFT] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_RIGHT)
key[KEY_RIGHT] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_UP)
key[KEY_UP] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_DOWN)
key[KEY_DOWN] = 1;
if (event.keyboard.keycode == ALLEGRO_KEY_SPACE){
repaintbala = 1;
BalaNave.setBalaX(Player.getX()+45);
BalaNave.setBalaY(Player.getY()+15);
}
if (event.keyboard.keycode == ALLEGRO_KEY_P)
key[KEY_P] = 1;
}
if (event.type == ALLEGRO_EVENT_KEY_UP)
{
if (event.keyboard.keycode == ALLEGRO_KEY_LEFT)
key[KEY_LEFT] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_RIGHT)
key[KEY_RIGHT] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_UP)
key[KEY_UP] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_DOWN)
key[KEY_DOWN] = 0;
if (event.keyboard.keycode == ALLEGRO_KEY_P)
key[KEY_P] = 0;
}
if (event.type == ALLEGRO_EVENT_TIMER){
if (key[KEY_LEFT] && Player.getX()>=0)
Player.setX(Player.getX()-4);
if (key[KEY_RIGHT] && Player.getX()<=550)
Player.setX(Player.getX()+4);;
if (key[KEY_UP] && Player.getY()>=0)
Player.setY(Player.getY()-4);
if (key[KEY_DOWN]&& Player.getY()<=400)
Player.setY(Player.getY()+4);
if (repaintbala){
if (BalaNave.getBalaY()>-30){
BalaNave.DisparaNave();
}
else
repaintbala = 0;
}
repaint = 1;
}
if (repaint){
if (repaint){
al_draw_bitmap(Fondo,0,Y,0);
Y+=4;
if (Y == 0)
Y = -1400;
}
if (repaintbala)
al_draw_bitmap(BalaNave.Dibujar(BalaName),BalaNave.getBalaX(),BalaNave.getBalaY(),0);
al_draw_bitmap(Player.Dibujar(NaveName),Player.getX(),Player.getY(),0);
al_draw_bitmap(Player2.Dibujar(NaveName),Player2.getX(),Player2.getY(),ALLEGRO_FLIP_VERTICAL);
if (key[KEY_P]){
NaveName = "/home/gerardo/CLionProjects/AirWar++/images/Boom.png";
}
al_flip_display();
repaint = 0;
}
}
al_destroy_bitmap(Fondo);
al_destroy_event_queue(evento);
al_destroy_timer(timer);
return 0;
}<file_sep>file(REMOVE_RECURSE
"CMakeFiles/scores.dir/main.cpp.o"
"scores.pdb"
"scores"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/scores.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>cmake_minimum_required(VERSION 3.6)
project(AirWar__)
set(CMAKE_CXX_STANDARD 11)
# Allegro 5 headers and libraries.
# NOTE(review): `usr/allegro5/include` is a RELATIVE path — it only works
# if the build is run from /; presumably `/usr/...` was intended. Confirm.
INCLUDE_DIRECTORIES( usr/allegro5/include )
LINK_DIRECTORIES( usr/allegro5/include/lib )
# Allegro shared libraries the game links against.
set(game_LIBS liballegro.so liballegro_image.so liballegro_audio.so liballegro_acodec.so liballegro_primitives.so liballegro_font.so liballegro_ttf.so)
set(SOURCE_FILES main.cpp Objects/Nave.cpp Objects/Nave.h Objects/Jugador.h Objects/Objects.h)
add_executable(AirWar__ ${SOURCE_FILES})
target_link_libraries(AirWar__ ${game_LIBS})
<file_sep>//
// Created by gerardo on 28/03/17.
//
#ifndef AIRWAR_NAVE_H
#define AIRWAR_NAVE_H
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
// The player's ship: position, collision box, facing, health/lives and
// simple accessors. All state is mutated through the setters; the sprite
// itself is loaded on demand via Dibujar().
class Nave{
private:
int X;                 // screen position
int Y;
int BordeX =73;        // collision box size
int BordeY=63;
int orientation=2;     // facing code 1..8 (1=left 2=up 3=right 4=down, 5..8 diagonals)
int Salud=100;         // health, 0..100
int Ndiparos = 0;      // shots fired (unused by the visible code)
int Puntaje = 0;       // score (unused here; a file-scope Puntaje is used instead)
int Vidas = 10;        // remaining lives
int Velocidad = 5;     // pixels moved per update
int PowerUps=0; // new
public:
Nave(int _X,int _Y);
int getVidas(){ return Vidas;}
int getVelocidad(){ return Velocidad;}
int getX(){ return X;}
int getOR(){return orientation;}
int getBordeX(){ return BordeX;}
int getY(){ return Y;}
int getBordeY(){ return BordeY;}
int getSalud(){ return Salud;}
void setSalud(int sal){Nave::Salud = sal;}
void setOR(int OR){Nave::orientation = OR;}
void setY(int Y){Nave::Y = Y;}
void setX(int X){Nave::X = X;}
void setVidas(int Vida){ Nave::Vidas = Vida;}
void setVelocidad(int Vel){ Nave::Velocidad = Vel;}
int getPowerUps(){ return PowerUps;} // new
void setPowerUps(int Power){ Nave::PowerUps = Power;} // new
// Loads and returns the sprite at `name`; the caller owns the bitmap.
ALLEGRO_BITMAP *Dibujar(char name[]);
};
#endif //AIRWAR_NAVE_H
<file_sep>#include <iostream>
#include <fstream>
using namespace std;
void guardar(char*,int);
void ordenar(char*, int);
// Demo driver: inserts one hard-coded score into the ranking file.
int main(){
    char jugador[50] = "rata";
    int puntos = 567;
    ordenar(jugador, puntos);
}
// Appends one "name score" record to the persistent score file, in the
// newline-delimited layout that ordenar() reads back.
void guardar(char* name,int puntaje){
    std::ofstream salida("Datos.txt", std::ios::app);
    salida << std::endl << name << " " << puntaje << std::endl;
}
// Re-ranks the top-5 score table stored in "Datos.txt": reads the five
// existing "name score" pairs, then re-writes them via guardar() with the
// new entry inserted at its descending-score position; a score no better
// than the current fifth place is dropped.
//
// Fixes:
//  - fopen() is now NULL-checked (the function previously dereferenced a
//    null FILE* and crashed when the file was missing);
//  - the FILE handle is now closed (it leaked);
//  - the fscanf() calls pass the char arrays directly — `&nprimero` has
//    type char(*)[80] where %s expects char*, which is undefined behaviour.
//
// NOTE(review): guardar() opens with ios::app, so every call APPENDS the
// re-ranked table to the end of the file instead of replacing it, and the
// next read picks up the first five tokens — confirm this is intended.
void ordenar(char* nombre,int puntaje){
    FILE *miarchivo;
    const char * archivo="Datos.txt";
    char nprimero[80],sprim[80],nsegundo[80],sseg[80],ntercero[80],sterc[80],ncuarto[80],scuar[80],nquinto[80],squi[80];
    miarchivo=fopen(archivo,"r");
    if (miarchivo == NULL) {
        // No table yet: record the new score as the first entry.
        guardar(nombre, puntaje);
        return;
    }
    fscanf(miarchivo,"%s",nprimero);
    fscanf(miarchivo,"%s",sprim);
    int num=atoi(sprim);
    fscanf(miarchivo,"%s",nsegundo);
    fscanf(miarchivo,"%s",sseg);
    int num2=atoi(sseg);
    fscanf(miarchivo,"%s",ntercero);
    fscanf(miarchivo,"%s",sterc);
    int num3=atoi(sterc);
    fscanf(miarchivo,"%s",ncuarto);
    fscanf(miarchivo,"%s",scuar);
    int num4=atoi(scuar);
    fscanf(miarchivo,"%s",nquinto);
    fscanf(miarchivo,"%s",squi);
    int num5=atoi(squi);
    fclose(miarchivo);   // fix: handle was leaked
    // Write the five survivors back in rank order, inserting the new score.
    if (puntaje>num5 && puntaje<num4){
        guardar(nprimero,num);
        guardar(nsegundo,num2);
        guardar(ntercero,num3);
        guardar(ncuarto,num4);
        guardar(nombre,puntaje);
    }else if (puntaje>num4 && puntaje<num3){
        guardar(nprimero,num);
        guardar(nsegundo,num2);
        guardar(ntercero,num3);
        guardar(nombre,puntaje);
        guardar(ncuarto,num4);
    }else if (puntaje>num3 && puntaje<num2){
        guardar(nprimero,num);
        guardar(nsegundo,num2);
        guardar(nombre,puntaje);
        guardar(ntercero,num3);
        guardar(ncuarto,num4);
    }else if (puntaje>num2 && puntaje<num){
        guardar(nprimero,num);
        guardar(nombre,puntaje);
        guardar(nsegundo,num2);
        guardar(ntercero,num3);
        guardar(ncuarto,num4);
    }else if (puntaje>num){
        guardar(nombre,puntaje);
        guardar(nprimero,num);
        guardar(nsegundo,num2);
        guardar(ntercero,num3);
        guardar(ncuarto,num4);
    }else{
        // New score did not beat anyone (ties included): keep the old table.
        guardar(nprimero,num);
        guardar(nsegundo,num2);
        guardar(ntercero,num3);
        guardar(ncuarto,num4);
        guardar(nquinto,num5);
    }
}
<file_sep>//
// Created by gerardo on 28/03/17.
//
#include <iostream>
#include <allegro5/allegro.h>
#include <allegro5/allegro_image.h>
#include "Nave.h"
// Constructs a ship at the given screen position; remaining fields keep the
// in-class defaults declared in Nave.h.
Nave::Nave(int _X,int _Y){
X = _X;
Y = _Y;
}
// Loads the ship sprite from the given image path.
// NOTE(review): al_load_bitmap returns NULL on failure and the caller owns
// the bitmap (must release it with al_destroy_bitmap) -- confirm callers do.
ALLEGRO_BITMAP* Nave::Dibujar(char name[]){
ALLEGRO_BITMAP *Skin;
Skin = al_load_bitmap(name);
return Skin;
}
<file_sep>package com.example.juand.control11;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
/** Entry screen offering the Play and Scores buttons. */
public class MainActivity extends AppCompatActivity {
    Button btnPlay,btnScore;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        btnScore=(Button)findViewById(R.id.btnScores);
        btnPlay=(Button)findViewById(R.id.btnPlay);
        // Play: navigate to the connection screen.
        btnPlay.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent1=new Intent(MainActivity.this,ConnectActivity.class);
                startActivity(intent1);
            }
        });
        // NOTE(review): btnScore is looked up but no click listener is
        // attached -- confirm whether the scores screen is wired elsewhere.
    }
}
<file_sep>cmake_minimum_required(VERSION 3.6)
# Standalone build for the score-persistence prototype (single main.cpp).
project(scores)
# Sources use C++11 features.
set(CMAKE_CXX_STANDARD 11)
set(SOURCE_FILES main.cpp)
add_executable(scores ${SOURCE_FILES})
|
f8cf5758ef1dc7dbe50aceec2df8a43ac5234197
|
[
"Java",
"CMake",
"C",
"C++"
] | 18 |
C++
|
DatosIIGNJ/AirWar
|
e990075c282ae012ef0ca66a783efe539904f65e
|
1102c45736de48c5aed0aa83076aff56aebeb62a
|
refs/heads/main
|
<repo_name>musaogural/CleanCode<file_sep>/java/src/ch04/proxy/basbakan2/Vatandas.java
package org.javaturk.cc.ch04.proxy.basbakan2;
/**
 * Citizen in the proxy example: every request goes through the {@link Vekil}
 * (deputy/proxy) rather than to the prime minister directly.
 */
public class Vatandas {

	private Vekil vekil = new Vekil();

	public String derdiniAnlat() {
		return ilet("Bir derdim var...");
	}

	public String isIste() {
		return ilet("Oglum'a iş bul...");
	}

	// Forwards the message to the proxy and returns the citizen's cheer.
	private String ilet(String mesaj) {
		vekil.dinle(mesaj);
		return "Yasasiiinnn!!!";
	}
}
<file_sep>/java/src/ch02/coupling/account/account2/AccountAction.java
package org.javaturk.cc.ch02.coupling.account.account2;
import org.javaturk.cc.ch02.coupling.account.ex.InsufficentBalanceException;
/**
 * Strategy for balance-changing operations; decouples Account from the
 * concrete action (coupling chapter, account2 variant).
 */
public interface AccountAction {
	/**
	 * Applies this action to the balance.
	 *
	 * @param balance current balance
	 * @param amount  amount the action operates with
	 * @return the new balance
	 * @throws InsufficentBalanceException when the balance cannot cover the action
	 */
	public double act(double balance, double amount) throws InsufficentBalanceException;
}
<file_sep>/java/src/ch03/lsp/designByContract/taxi/VipTaxi.java
package org.javaturk.cc.ch03.lsp.designByContract.taxi;
/**
 * VIP taxi in the design-by-contract example: it accepts every customer
 * (precondition returns true unconditionally -- presumably weaker than
 * Taxi's; confirm against Taxi.checkPrecondition) and returns the more
 * specific VipService via a covariant override.
 */
public class VipTaxi extends Taxi {

	public VipTaxi(String string) {
		super(string);
	}

	// Accepts any customer unconditionally.
	public boolean checkPrecondition(Customer customer) {
		return true;
	}

	@Override
	public VipService service() {
		return new VipService();
	}
}
<file_sep>/java/src/ch03/isp/paper/collection/IEnumerator.java
package org.javaturk.cc.ch03.isp.paper.collection;
/** Marker interface from the ISP collection example; no operations yet. */
public interface IEnumerator {
}
<file_sep>/java/src/ch03/dip/paper/problem/CopyTest.java
package org.javaturk.cc.ch03.dip.paper.problem;
/**
 * Driver for the DIP "copy" problem example: wires the concrete Keyboard
 * and Printer straight into Copy -- the rigid coupling the chapter
 * criticises.
 *
 * @author akin
 */
public class CopyTest {
	public static void main(String[] args) {
		Keyboard in = new Keyboard();
		Printer out = new Printer();
		Copy copy = new Copy(in, out);
		copy.start();
	}
}
<file_sep>/java/src/ch02/cohesion/temperature/functional/TemperatureConverter.java
package org.javaturk.cc.ch02.cohesion.temperature.functional;
/** Converts a temperature value between scales. */
public interface TemperatureConverter {
	/**
	 * @param source scale of the given degree
	 * @param target scale to convert to
	 * @param degree value to convert
	 * @return the degree expressed in the target scale
	 */
	public double convert(TemperatureType source, TemperatureType target, double degree);
}
<file_sep>/java/src/ch03/lsp/designByContract/calculator/ex/NegativeArgumentException.java
package org.javaturk.cc.ch03.lsp.designByContract.calculator.ex;
/** Thrown by the design-by-contract calculator example when an argument is negative. */
public class NegativeArgumentException extends Exception {
}
<file_sep>/java/src/ch03/srp/customerService/solution/ex/NegativeAmountException.java
package org.javaturk.cc.ch03.srp.customerService.solution.ex;
/**
 * Thrown when a negative amount is supplied. The standard prefix is
 * prepended exactly once, in the constructor.
 */
public class NegativeAmountException extends Exception {
	private static final String prefix = "Negative amount can't be provided. ";

	public NegativeAmountException(String string) {
		super(prefix + string);
	}

	/**
	 * Bug fix: the original prepended {@code prefix} here as well, even
	 * though the constructor already did, so messages came out with the
	 * prefix doubled.
	 */
	public String getMessage(){
		return super.getMessage();
	}
}
<file_sep>/java/src/ch03/lsp/square/Geometry.java
package org.javaturk.cc.ch03.lsp.square;
/**
 * Client code written against the Rectangle contract; used in the LSP
 * chapter to show how a substituted Square misbehaves.
 */
public class Geometry {

	/** Prints the area of the given rectangle. */
	public void draw(Rectangle rectangle) {
		double area = rectangle.calculateArea();
		System.out.println("Area: " + area);
	}
}
<file_sep>/java/src/ch03/lsp/square/Square.java
package org.javaturk.cc.ch03.lsp.square;
/**
 * Classic LSP counter-example: Square-as-a-Rectangle. Both setters set BOTH
 * sides to keep the square invariant, which violates the setter contract
 * clients of Rectangle rely on (see Geometry in this chapter).
 */
public class Square extends Rectangle{

	public Square() {
	}

	public Square(int side) {
		super(side, side);
	}

	// Forces both sides to the new short side.
	@Override
	public void setShortSide(int shortSide) {
		this.shortSide = shortSide;
		this.longSide = shortSide;
	}

	// Forces both sides to the new long side.
	@Override
	public void setLongSide(int longSide) {
		this.shortSide = longSide;
		this.longSide = longSide;
	}
}
<file_sep>/java/src/ch03/ocp/factory/solution/Factory.java
package org.javaturk.cc.ch03.ocp.factory.solution;
/**
 * OCP example: factories create Employee instances so client code stays
 * closed against new employee types.
 */
public interface Factory {
	public Employee create();
}
<file_sep>/java/src/ch02/coupling/account/account1/Account.java
package org.javaturk.cc.ch02.coupling.account.account1;
import org.javaturk.cc.ch02.coupling.account.ex.InsufficentBalanceException;
import org.javaturk.cc.ch02.coupling.account.ex.NegativeAmountException;
/**
 * "Bad coupling" variant: the operation is selected by a magic string, so
 * Account must know every action name (contrast with account2, where an
 * AccountAction strategy is passed in).
 */
public class Account {
	private double balance;

	/**
	 * @param action "Deposit" or "Withdraw"; any other value is silently ignored
	 * @param amount amount to apply; must be non-negative
	 * @throws NegativeAmountException     when amount is negative
	 * @throws InsufficentBalanceException when a withdrawal exceeds the balance
	 */
	public void changeBalance(String action, double amount)
			throws InsufficentBalanceException, NegativeAmountException {
		if (amount < 0)
			throw new NegativeAmountException(amount);

		if (action.equals("Deposit"))
			balance += amount;
		else if (action.equals("Withdraw")) {
			if (balance >= amount)
				balance -= amount;
			else
				throw new InsufficentBalanceException(action, balance, amount);
		}
		// log.info(action + " : " + amount + " for account id: " + id);
	}
}
<file_sep>/java/src/ch03/srp/customerService/solution/ex/CustomerLockedException.java
package org.javaturk.cc.ch03.srp.customerService.solution.ex;
/** Thrown when an operation is attempted on a locked customer. */
public class CustomerLockedException extends Exception {
	public CustomerLockedException(String message){
		super(message);
	}
}
<file_sep>/java/src/ch02/coupling/atm/CardReader.java
package org.javaturk.cc.ch02.coupling.atm;
/**
 * ATM card-reader states expressed as int constants (pre-enum style;
 * callers compare against these ints directly).
 */
public class CardReader {
	public static final int NO_CARD = 0;
	public static final int CARD_INSIDE = 1;
	public static final int CARD_EJECTED = 2;
	// ...
}
<file_sep>/java/src/ch02/anemic/account2/AmountValidator.java
package org.javaturk.cc.ch02.anemic.account2;
import org.javaturk.cc.ch02.coupling.account.ex.NegativeAmountException;
/** Validates monetary amounts for the anemic-domain example. */
public class AmountValidator {
	/**
	 * @param amount amount to check
	 * @throws NegativeAmountException when amount is negative
	 *         (NOTE(review): NaN passes this check -- confirm that is intended)
	 */
	public void validate(double amount) throws NegativeAmountException {
		if(amount < 0)
			throw new NegativeAmountException(amount);
	}
}
<file_sep>/java/src/ch04/proxy/basbakan3/BasbakanlikKalemi.java
package org.javaturk.cc.ch04.proxy.basbakan3;
/**
 * Proxy example, version 3: the office wraps the real Basbakan in a
 * VekilBasbakan proxy and only ever hands out the proxy.
 */
public class BasbakanlikKalemi {
	private Basbakan basbakan;

	public BasbakanlikKalemi(Basbakan basbakan){
		// Wrap the real prime minister in the protecting proxy.
		this.basbakan = new VekilBasbakan(basbakan);
	}

	// Returns the proxy, never the real subject.
	public Basbakan banaBasbakaniVer(){
		System.out.println("Başbakanlık Kalemi: Tabi efendim");
		return basbakan;
	}
}
<file_sep>/java/src/ch03/srp/task/Sorter.java
package org.javaturk.cc.ch03.srp.task;
import java.util.Collection;
/**
 * SRP example: the single sorting responsibility.
 *
 * @param <T> element type
 */
public interface Sorter<T> {
	/** Sorts the given collection. */
	public void sort(Collection<T> collection);
}
<file_sep>/java/src/ch03/isp/paper/collection/Test.java
package org.javaturk.cc.ch03.isp.paper.collection;
import java.util.*;
/** Placeholder driver for the ISP collection example; does nothing yet. */
public class Test {
	public static void main(String[] args) {
		Collection col = null;
	}
}
<file_sep>/java/src/ch02/cohesion/address/PostalCode.java
package org.javaturk.cc.ch02.cohesion.address;
/** Value holder for a postal code (cohesion/address example). */
public class PostalCode {
	private String code;
}
<file_sep>/java/src/ch03/srp/customerService/solution/dao/CustomerDAOI.java
package org.javaturk.cc.ch03.srp.customerService.solution.dao;
import org.javaturk.cc.ch03.srp.domain.Customer;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
/**
 * Data-access contract for Customer persistence: CRUD plus a refresh that
 * re-reads the entity's state from the store.
 */
public interface CustomerDAOI {
	/**
	 * Persists a new customer.
	 *
	 * @param customer Customer to create
	 * @return boolean true when the customer was stored
	 * @throws CustomerAlreadyExistsException when a customer with the same identity exists
	 */
	public boolean createCustomer(Customer customer) throws CustomerAlreadyExistsException;

	/**
	 * Loads the customer identified by the given TCKN.
	 *
	 * @param tckn String national identity number used as the key
	 * @return Customer found for the tckn
	 * @throws NoSuchCustomerException when no customer exists for the tckn
	 */
	public Customer retrieveCustomer(String tckn) throws NoSuchCustomerException;

	/**
	 * Writes the customer's current state back to the store.
	 *
	 * @param customer Customer to update
	 * @return boolean true when the update succeeded
	 * @throws NoSuchCustomerException when the customer is not in the store
	 */
	public boolean updateCustomer(Customer customer) throws NoSuchCustomerException;

	/**
	 * Removes the customer from the store.
	 *
	 * @param customer Customer to delete
	 * @return boolean true when the customer was removed
	 * @throws NoSuchCustomerException when the customer is not in the store
	 */
	public boolean deleteCustomer(Customer customer) throws NoSuchCustomerException;

	/**
	 * Re-reads the customer's state from the store.
	 *
	 * @param customer Customer to refresh
	 * @return Customer the freshly loaded instance
	 * @throws NoSuchCustomerException when the customer is not in the store
	 */
	public Customer refreshCustomer(Customer customer) throws NoSuchCustomerException;
}
<file_sep>/java/src/ch03/srp/customerService/solution/util/ATMProperties.java
package org.javaturk.cc.ch03.srp.customerService.solution.util;
import java.util.Properties;
public class
ATMProperties {
private static Properties properties;
public static Properties getProperties() {
return properties;
}
public static void setProperties(Properties properties) {
ATMProperties.properties = properties;
}
public static void setProperty(String name, String property) {
properties.setProperty(name, property);
}
public static String getProperty(String name) {
return properties.getProperty(name);
}
public static void setProperyFilePath(String string) {
// TODO Auto-generated method stub
}
}
<file_sep>/java/src/ch02/cohesion/address/Adres.java
package org.javaturk.cc.ch02.cohesion.address;
/**
 * Turkish street address split into its parts (cohesion example).
 * Field names are Turkish: daire = flat, kat = floor, kapi = door number,
 * binaAdi = building name, blok = block, sokak = street, cadde = avenue,
 * ilce = district, il = province, postaKodu = postal code.
 */
public class Adres {
	private String daire;
	private String kat;
	private String kapi;
	private String binaAdi;
	private String blok;
	private String sokak;
	private String cadde;
	private String ilce;
	private String il;
	private String postaKodu;
}
<file_sep>/java/src/ch03/isp/log/bad/Logger.java
package org.javaturk.cc.ch03.isp.log.bad;
import java.io.File;
import org.javaturk.cc.ch03.isp.log.Log;
/**
 * ISP "bad" example: one fat interface mixes logging with connection and
 * file management, forcing every implementor to provide all of it.
 */
public interface Logger {
	void log(Log log);

	void openConnection();
	void closeConnection();

	void openFile(File file, boolean create);
	void closeFile();
}
<file_sep>/java/src/ch03/srp/paper/gui/solution/ComputationaGeometryApplication.java
package org.javaturk.cc.ch03.srp.paper.gui.solution;
/**
 * SRP paper example: the application that uses Rectangle for computation.
 * (The class name keeps its original typo to avoid breaking references.)
 */
public class ComputationaGeometryApplication {
	private Rectangle rectangle;
}
<file_sep>/java/src/ch03/lsp/square/Rectangle.java
package org.javaturk.cc.ch03.lsp.square;
/**
 * Axis-aligned rectangle with integer sides; base class of the LSP
 * Square example (hence the protected fields).
 */
public class Rectangle {

	protected int shortSide;
	protected int longSide;

	/** Degenerate rectangle with both sides 0. */
	public Rectangle() {
	}

	public Rectangle(int shortSide, int longSide) {
		this.shortSide = shortSide;
		this.longSide = longSide;
	}

	/** @return area, shortSide * longSide */
	public double calculateArea() {
		return shortSide * longSide;
	}

	/** @return perimeter of the rectangle */
	public double calculateCircumference() {
		return (shortSide + longSide) * 2;
	}

	public int getShortSide() {
		return shortSide;
	}

	public void setShortSide(int shortSide) {
		this.shortSide = shortSide;
	}

	public int getLongSide() {
		return longSide;
	}

	public void setLongSide(int longSide) {
		this.longSide = longSide;
	}
}
<file_sep>/java/src/ch03/srp/customerService/solution/util/PasswordValidator.java
package org.javaturk.cc.ch03.srp.customerService.solution.util;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
/**
 * Validates customer passwords against the configured length limits and,
 * when enabled, the "at least one letter and one digit" rule.
 *
 * Bug fix: the null/empty guard used the non-short-circuit '|' operator,
 * so a null password reached password.length() and threw a
 * NullPointerException instead of NoProperPasswordException.
 */
public class PasswordValidator implements Validator{

	/**
	 * Validates the password of the customer.
	 *
	 * @param password password of the customer
	 * @throws NoProperPasswordException when a password rule is broken
	 */
	@Override
	public void validate(String password) throws NoProperPasswordException {
		if (password == null || password.length() == 0)
			throw new NoProperPasswordException("Empty password not allowed.");

		int passwordMinLength = Integer.parseInt(ATMProperties.getProperty("customer.password.length.min"));
		int passwordMaxLength = Integer.parseInt(ATMProperties.getProperty("customer.password.length.max"));

		if (password.length() < passwordMinLength || password.length() > passwordMaxLength)
			throw new NoProperPasswordException("Password must have at least " + passwordMinLength + " and at most "
					+ passwordMaxLength + " characters");

		// Detailed content checks are optional, driven by configuration.
		if (ATMProperties.getProperty("customer.password.checkDetails").equals("yes")) {
			// Now check if it has at least one char and one digit.
			boolean b1 = false;
			boolean b2 = false;
			int length = password.length();

			// Letter check
			for (int i = 0; i < length; i++) {
				char c = password.charAt(i);
				if (Character.isLetter(c)) {
					b1 = true;
					break;
				}
			}

			// Digit check
			for (int i = 0; i < length; i++) {
				char c = password.charAt(i);
				if (Character.isDigit(c)) {
					b2 = true;
					break;
				}
			}

			// If one of them is false throw an exception.
			if (!(b1 && b2))
				throw new NoProperPasswordException("The password you provided doesn't satisfies the rules: "
						+ password);
		}
	}
}
<file_sep>/java/src/ch03/lsp/designByContract/taxi/Test.java
package org.javaturk.cc.ch03.lsp.designByContract.taxi;
/** Driver for the taxi design-by-contract example. */
public class Test {
	public static void main(String[] args) {
		Customer customer1 = new Customer("Akin", 800);
		Taxi taxi = new Taxi("Zafer");
		customer1.call(taxi);
		Service service = taxi.service();
		System.out.println(service);

		// A customer travelling further uses the VIP taxi (subtype).
		Customer customer2 = new Customer("Yasemin", 1200);
		Taxi vipTaxi = new VipTaxi("Cemil");
		customer2.call(vipTaxi);
		Service vipService = vipTaxi.service();
		System.out.println(vipService);
	}
}
<file_sep>/java/src/ch03/srp/customerService/problem/CustomerService.java
package org.javaturk.cc.ch03.srp.customerService.problem;
import java.util.logging.Logger;
import org.javaturk.cc.ch03.srp.customerService.solution.dao.CustomerDAOI;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
import org.javaturk.cc.ch03.srp.customerService.solution.util.ATMLogger;
import org.javaturk.cc.ch03.srp.customerService.solution.util.ATMProperties;
import org.javaturk.cc.ch03.srp.domain.Account;
import org.javaturk.cc.ch03.srp.domain.Customer;
/**
 * "Problem" version for the SRP chapter: one class mixes CRUD delegation,
 * login/logout/lock state handling and credential validation.
 *
 * Fixes applied: validateTckn used non-short-circuit '|', so a null tckn
 * threw NullPointerException instead of the intended exception;
 * validatePassword likewise NPEd on a null password; the
 * max-failed-attempts message was built after the counter was reset and
 * always reported 0.
 */
public class CustomerService {
	private CustomerDAOI customerDao;

	// Customer currently logged in through this service (null when none).
	private Customer currentCustomer;

	// Consecutive failed login attempts; the customer is locked when it
	// reaches the configured maximum.
	private int loginAttemptCount;

	protected Logger logger;

	public CustomerService() {
		logger = ATMLogger.getLogger();
	}

	/** Creates the customer; duplicates are logged, not rethrown. */
	public void createCustomer(Customer customer) {
		try {
			customerDao.createCustomer(customer);
		} catch (CustomerAlreadyExistsException e) {
			logger.severe("Customer already exist: " + e.getMessage());
		}
	}

	/**
	 * Retrieves the customer for the given tckn from the db.
	 *
	 * @return Customer retrieved
	 * @throws NoSuchCustomerException when no customer exists for the tckn
	 */
	public Customer retrieveCustomer(String tckn) throws NoSuchCustomerException {
		Customer customer = customerDao.retrieveCustomer(tckn);
		return customer;
	}

	/** Reloads the customer's state from the db. */
	public Customer refreshCustomer(Customer customer) throws NoSuchCustomerException {
		return customerDao.refreshCustomer(customer);
	}

	/**
	 * Validates the credentials and logs the customer in; after the
	 * configured number of consecutive wrong passwords the customer is
	 * locked.
	 */
	public void login(String tckn, String password)
			throws NoSuchCustomerException, CustomerLockedException, CustomerAlreadyLoggedException, WrongCustomerCredentialsException,
			MaxNumberOfFailedLoggingAttemptExceededException, ImproperCustomerCredentialsException, NoProperPasswordException {
		// First validate parameters
		validateTckn(tckn);
		validatePassword(password);

		// Now retrieve the customer from the db
		Customer customer = customerDao.retrieveCustomer(tckn);

		// If passwords match and the customer is neither locked nor already
		// logged in, the customer logs in and becomes the currentCustomer.
		if (customer.getPassword().equals(password) & !customer.isLocked() & !customer.isLoggedIn()) {
			customer.setLoggedIn(true);
			// customer.logsin is a property in atm.properties. If it is "yes"
			// the database is updated when a customer logs in
			// (CUSTOMERS.LOGGEDIN).
			if (ATMProperties.getProperty("customer.logsin").equals("yes")) {
				logger.info(customer.getFirstName() + " " + customer.getLastName() + " logs in.");
				customerDao.updateCustomer(customer);
			}
			currentCustomer = customer;
		} else if (customer.isLoggedIn()) {
			throw new CustomerAlreadyLoggedException("Customer is already logged in. Please first log out.");
		} else if (customer.isLocked()) {
			throw new CustomerLockedException("Customer is locked. Please consult your admin.");
		} else if (!customer.getPassword().equals(password)) {
			loginAttemptCount++;
			if (loginAttemptCount == Integer.parseInt(ATMProperties.getProperty("customer.maxFailedLoginAttempt"))) {
				customer.setLocked(true);
				customerDao.updateCustomer(customer);
				// Fix: capture the count before resetting it -- the original
				// reset first and the message always reported 0.
				int attempts = loginAttemptCount;
				loginAttemptCount = 0;
				throw new MaxNumberOfFailedLoggingAttemptExceededException("Max number of login attempt reached: " + attempts);
			}
			else
				throw new WrongCustomerCredentialsException("TCKN/password is wrong.");
		}
	}

	/**
	 * Marks the customer as logged out and persists the change.
	 * NOTE(review): the return value is forced to true after the DAO call,
	 * masking a false result from updateCustomer -- kept as-is.
	 */
	public boolean logout(Customer customer) {
		boolean logout = false;
		customer.setLoggedIn(false);
		try {
			logout = customerDao.updateCustomer(customer);
			logout = true;
			logger.info("Customer logging out: " + customer);
		} catch (NoSuchCustomerException e) {
			logger.severe("Problem when logging out the customer " + e.getMessage());
		}
		// It should point to null (only clears the local reference).
		customer = null;
		return logout;
	}

	/** Locks the customer and persists the flag. */
	public boolean lockCustomer(Customer customer) {
		boolean lock = false;
		customer.setLocked(true);
		try {
			lock = customerDao.updateCustomer(customer);
			logger.info("Customer locked: " + customer);
		} catch (NoSuchCustomerException e) {
			logger.severe("Problem when locking the customer " + e.getMessage());
		}
		return lock;
	}

	public Account getDefaultAccount(Customer customer) {
		return customer.getDefaultAccount();
	}

	/** Validates and stores the new password, then persists the customer. */
	public boolean changePassword(Customer customer, String password) throws NoProperPasswordException {
		boolean change = false;
		validatePassword(password);
		customer.setPassword(password);
		try {
			change = customerDao.updateCustomer(customer);
		} catch (NoSuchCustomerException e) {
			logger.severe("Problem when updating the password of the customer " + e.getMessage());
		}
		logger.info("Customer changed the password: " + customer);
		return change;
	}

	/**
	 * Retrieves the current logged-in customer.
	 *
	 * @return Current logged-in customer
	 */
	public Customer getCurrentCustomer() {
		return currentCustomer;
	}

	private boolean validatePassword(String password) throws NoProperPasswordException {
		// Guard added: the original called password.length() without a null
		// check and threw NullPointerException for null input.
		if (password == null || password.length() == 0)
			throw new NoProperPasswordException("The password you provided doesn't satisfies the rules: " + password);

		boolean b1 = false;
		boolean b2 = false;
		int length = password.length();

		// Length check
		if (length < 6)
			throw new NoProperPasswordException("The password you provided doesn't satisfies the rules: " + password);

		// Letter check
		for (int i = 0; i < length; i++) {
			char c = password.charAt(i);
			if (Character.isLetter(c)) {
				b1 = true;
				break;
			}
		}

		// Digit check
		for (int i = 0; i < length; i++) {
			char c = password.charAt(i);
			if (Character.isDigit(c)) {
				b2 = true;
				break;
			}
		}

		// If both hold true
		if (b1 && b2)
			return true;
		else
			throw new NoProperPasswordException("The password you provided doesn't satisfies the rules: " + password);
	}

	/**
	 * This method validates tckn of the customer.
	 *
	 * @param tckn Tckn of the customer.
	 * @throws ImproperCustomerCredentialsException when the tckn rules are broken
	 */
	private boolean validateTckn(String tckn) throws ImproperCustomerCredentialsException {
		boolean b = true;
		// '||' (was '|'): with the non-short-circuit operator a null tckn
		// reached tckn.length() and threw NullPointerException instead of
		// this exception.
		if (tckn == null || tckn.length() == 0)
			throw new ImproperCustomerCredentialsException("Empty TCKN not allowed.");

		String length = ATMProperties.getProperty("customer.tckn.length");
		int tcknLength = Integer.parseInt(length);
		if (tckn.length() < tcknLength)
			throw new ImproperCustomerCredentialsException("TCKN must have " + tcknLength + " characters.");

		return b;
	}
}
<file_sep>/java/src/ch02/anemic/account2/AccountService.java
package org.javaturk.cc.ch02.anemic.account2;
import org.javaturk.cc.ch02.coupling.account.ex.InsufficentBalanceException;
import org.javaturk.cc.ch02.coupling.account.ex.NegativeAmountException;
/**
 * Service layer of the anemic-domain "improved" example: validation is
 * delegated to AmountValidator, balance changes to the Account itself.
 */
public class AccountService {
	// Flat fee charged to the source account for an EFT.
	private static final double EFT_CHARGE = 3;

	private AmountValidator validator = new AmountValidator();

	public void withdraw(Account account, double amount) throws InsufficentBalanceException, NegativeAmountException {
		validator.validate(amount);
		account.withdraw(amount);
	}

	public void deposit(Account account, double amount) throws InsufficentBalanceException, NegativeAmountException {
		validator.validate(amount);
		account.deposit(amount);
	}

	/**
	 * Transfers amount and charges EFT_CHARGE to the source account.
	 * NOTE(review): the three balance changes are not atomic -- if a later
	 * step throws, earlier withdrawals are not rolled back. Confirm whether
	 * a transaction is handled elsewhere.
	 */
	public void eft(Account sourceAccount, Account targetAccount, double amount) throws InsufficentBalanceException, NegativeAmountException {
		validator.validate(amount);
		sourceAccount.withdraw(amount);
		sourceAccount.withdraw(EFT_CHARGE);
		targetAccount.deposit(amount);
	}
}
<file_sep>/java/src/ch02/cohesion/temperature/procedural/v1/TemperatureConverter.java
package org.javaturk.cc.ch02.cohesion.temperature.procedural.v1;
/**
 * Procedural v1 converter between Celsius ('c'), Fahrenheit ('f') and
 * Kelvin ('k'). Unknown source/target pairs fall through and yield 0.0.
 *
 * Bug fix: in the Fahrenheit branch the Celsius target was tested as 'k',
 * so F->K requests got the F->C formula and the real F->K case was
 * unreachable dead code.
 */
public class TemperatureConverter {

	public double convert(char source, char target, double degree) {
		double result = 0.0;
		if (source == 'c') {
			if (target == 'f')
				result = (degree * 9 / 5) + 32;
			else if (target == 'k')
				result = degree + 273.15;
			else if (target == 'c')
				result = degree;
		} else if (source == 'f') {
			if (target == 'c')
				result = (degree - 32) * 5 / 9;
			else if (target == 'k')
				result = (degree - 32) * 5 / 9 + 273.15;
			else if (target == 'f')
				result = degree;
		} else if (source == 'k') {
			if (target == 'f')
				result = (degree - 273.15) * 9 / 5 + 32;
			else if (target == 'c')
				result = degree - 273.15;
			else if (target == 'k')
				result = degree;
		}
		return result;
	}
}
<file_sep>/java/src/ch03/srp/customerService/solution/util/TcknValidator.java
package org.javaturk.cc.ch03.srp.customerService.solution.util;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
/**
 * Validates a customer's TCKN (national identity number) against the
 * configured minimum length.
 *
 * Bug fix: the null/empty guard used the non-short-circuit '|' operator,
 * so a null tckn reached tckn.length() and threw NullPointerException
 * instead of ImproperCustomerCredentialsException.
 */
public class TcknValidator implements Validator{

	/**
	 * Validates the tckn of the customer.
	 *
	 * @param tckn Tckn of the customer.
	 * @throws ImproperCustomerCredentialsException when the tckn rules are broken
	 */
	@Override
	public void validate(String tckn) throws ImproperCustomerCredentialsException {
		if (tckn == null || tckn.length() == 0)
			throw new ImproperCustomerCredentialsException("Empty TCKN not allowed.");

		String length = ATMProperties.getProperty("customer.tckn.length");
		int tcknLength = Integer.parseInt(length);
		if (tckn.length() < tcknLength)
			throw new ImproperCustomerCredentialsException("TCKN must have " + tcknLength + " characters.");
	}
}
<file_sep>/java/src/ch03/srp/customerService/solution/service/CustomerService.java
package org.javaturk.cc.ch03.srp.customerService.solution.service;
import java.io.Serializable;
import org.javaturk.cc.ch03.srp.customerService.solution.dao.CustomerDAOI;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
import org.javaturk.cc.ch03.srp.customerService.solution.util.Validator;
import org.javaturk.cc.ch03.srp.domain.*;
/**
*/
public class CustomerService extends AbstractService implements CustomerServiceI, Serializable {
private CustomerDAOI customerDao;
private Validator passwordValidator;
// Logged in customer
private Customer currentCustomer;
public CustomerService() {
}
@Override
public void checkIfCustomerAlreadyLoggedIn(Customer customer) throws CustomerAlreadyLoggedException {
if (customer.isLoggedIn()) {
throw new CustomerAlreadyLoggedException("Customer is already logged in. Please first log out.");
}
}
@Override
public void checkIfCustomerLocked(Customer customer) throws CustomerLockedException {
if (customer.isLocked()) {
throw new CustomerLockedException("Customer is locked. Please consult your admin.");
}
}
/**
* Method lockCustomer.
*
* @param customer
* Customer
* @return boolean
* @see org.javaturk.dp.principles.srp.solution.service.teachmejava.atm.service.interfaces.CustomerServiceI#lockCustomer(Customer)
*/
@Override
public boolean lockCustomer(Customer customer) {
boolean lock = false;
customer.setLocked(true);
try {
lock = customerDao.updateCustomer(customer);
} catch (NoSuchCustomerException e) {
logger.severe("Problem when locking the customer " + e.getMessage());
e.printStackTrace();
}
return lock;
}
/**
* Method createCustomer.
*
* @param customer
* Customer
* @see org.javaturk.dp.principles.srp.solution.service.teachmejava.atm.service.interfaces.CustomerServiceI#createCustomer(Customer)
*/
@Override
public void createCustomer(Customer customer) {
// TODO Auto-generated method stub
}
/**
* Method getDefaultAccount.
*
* @param customer
* Customer
* @return Account
* @see org.javaturk.dp.principles.srp.solution.service.teachmejava.atm.service.interfaces.CustomerServiceI#getDefaultAccount(Customer)
*/
@Override
public Account getDefaultAccount(Customer customer) {
// TODO Auto-generated method stub
return null;
}
/**
* Changes the password.
*
* @param customer Customer
* @param password <PASSWORD>
* @return boolean true if the password has been changed successfuly.
* @throws ValidationException
* @see org.javaturk.dp.principles.srp.solution.service.teachmejava.atm.service.interfaces.CustomerServiceI#changePassword(Customer,
* String)
*/
@Override
public boolean changePassword(Customer customer, String password) throws ValidationException {
boolean change = false;
// First validate the password and then change it.
passwordValidator.validate(password);
customer.setPassword(<PASSWORD>);
try {
change = customerDao.updateCustomer(customer);
} catch (NoSuchCustomerException e) {
logger.severe("Problem when updating the password of the customer " + e.getMessage());
e.printStackTrace();
}
return change;
}
/**
*
* @return Current logged-in customer * @see
* com.teachmejava.atm.service.interfaces
* .CustomerServiceI#getCurrentCustomer()
*/
public Customer getCurrentCustomer() {
return currentCustomer;
}
/**
* Method refreshCustomer.
*
* @param customer
* Customer
* @return Customer
* @throws NoSuchCustomerException
* @see org.javaturk.dp.principles.srp.solution.service.teachmejava.atm.service.interfaces.CustomerServiceI#refreshCustomer(Customer)
*/
@Override
public Customer refreshCustomer(Customer customer) throws NoSuchCustomerException {
return customerDao.refreshCustomer(customer);
}
}
<file_sep>/java/src/ch03/srp/customerService/solution/util/Validator.java
package org.javaturk.cc.ch03.srp.customerService.solution.util;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
/**
 * Common contract for string validators; implementations throw a
 * ValidationException subclass describing the broken rule.
 */
public interface Validator {
	void validate(String string) throws ValidationException;
}
<file_sep>/java/src/ch02/coupling/account/account2/Account.java
package org.javaturk.cc.ch02.coupling.account.account2;
import org.javaturk.cc.ch02.coupling.account.ex.InsufficentBalanceException;
import org.javaturk.cc.ch02.coupling.account.ex.NegativeAmountException;
/**
 * "Good coupling" variant: the balance change is delegated to an injected
 * AccountAction strategy instead of being selected by a magic string.
 */
public class Account {
	private double balance;

	/**
	 * @param action strategy that computes the new balance
	 * @param amount amount to apply; must be non-negative
	 */
	public void changeBalance(AccountAction action, double amount)
			throws InsufficentBalanceException, NegativeAmountException {
		if (amount < 0)
			throw new NegativeAmountException(amount);

		balance = action.act(balance, amount);
		// log.info(action + " : " + amount + " for account id: " + id);
	}
}
<file_sep>/java/src/ch03/srp/task/Test.java
package org.javaturk.cc.ch03.srp.task;
import java.util.Collection;
/**
 * SRP multiple-responsibility demo: the single object produced by the
 * factory is used as a Sorter, a Thread and a Comparable via casts.
 */
public class Test {
	public static void main(String[] args) {
		SorterTaskFactory sorterTaskFactory = new SorterTaskFactory();

		// Sorter
		Sorter<Product> sorter = sorterTaskFactory.create();
		Collection<Product> list = null;
		// ...
		sorter.sort(list);

		// Thread
		Thread sorterThread = (Thread) sorter;
		sorterThread.run();

		// Comparable
		Comparable comparableSorter2 = (Comparable) sorterTaskFactory.create();
		Comparable comparableSorter1 = (Comparable) sorter;
		comparableSorter1.compareTo(comparableSorter2);
	}
}
<file_sep>/java/src/ch04/proxy/basbakan1/Basbakan.java
package org.javaturk.cc.ch04.proxy.basbakan1;
/**
 * Proxy example, version 1: the real prime minister handles citizens
 * directly (no proxy yet).
 */
public class Basbakan {
	/** Listens to a complaint and solves it if triage accepts it. */
	public void dertDinle(String dert) {
		System.out.println("Basbakan: Dinliyorum.");
		if(ayikla(dert))
			coz(dert);
	}

	/** Job requests are refused outright. */
	public void isBul(String yakinim) {
		System.out.println("Basbakan: Bana boyle isteklerle gelmeyin.");
	}

	// Triage: decides whether the complaint is worth solving (always true here).
	private boolean ayikla(String dert){
		boolean b = true;
		//...
		return b;
	}

	private void coz(String dert) {
		System.out.println("Basbakan: Bu derdi çözün: " + dert);
	}
}
<file_sep>/java/src/ch03/srp/customerService/solution/ex/NoProperPasswordException.java
package org.javaturk.cc.ch03.srp.customerService.solution.ex;
/** Thrown when a password breaks the configured password rules. */
public class NoProperPasswordException extends ValidationException {
	public NoProperPasswordException(String message){
		super(message);
	}
}
<file_sep>/java/src/ch02/cohesion/temperature/functional/validator/TemperatureValidator.java
package org.javaturk.cc.ch02.cohesion.temperature.functional.validator;
import org.javaturk.cc.ch02.cohesion.temperature.functional.Temperature;
import org.javaturk.cc.ch02.cohesion.temperature.functional.TemperatureType;
import org.javaturk.cc.ch02.cohesion.temperature.functional.ex.InvalidTemperatureException;
/**
 * Range-checks a Temperature against the physical lower bound of its scale
 * (-273 C, 0 K, -459.67 F).
 *
 * Idiom fix: the scale checks now use short-circuit '&&' instead of the
 * bitwise '&' -- same result here (both operands are side-effect-free),
 * but conventional and it skips the comparison when the type differs.
 */
public class TemperatureValidator implements Validator{

	@Override
	public void validate(Temperature temperature) throws InvalidTemperatureException {
		double degree = temperature.getDegree();
		TemperatureType type = temperature.getType();

		if(type.equals(TemperatureType.CELSIUS) && degree < -273)
			throw new InvalidTemperatureException("Celcius can't be lower than -273. Degree provided: " + degree);
		if(type.equals(TemperatureType.KELVIN) && degree < 0)
			throw new InvalidTemperatureException("Kelvin can't be lower than 0. Degree provided: " + degree);
		if(type.equals(TemperatureType.FAHRENHEIT) && degree < -459.67)
			throw new InvalidTemperatureException("Fahrenheit can't be lower than -459.67. Degree provided: " + degree);
	}
}
<file_sep>/java/src/ch03/isp/log/good/Logger.java
package org.javaturk.cc.ch03.isp.log.good;
import java.io.File;
import org.javaturk.cc.ch03.isp.log.Log;
/**
 * ISP "good" example: the logging contract reduced to the one method every
 * client actually needs (compare with the fat interface in the bad package).
 */
public interface Logger {
	void log(Log log);
}
<file_sep>/java/src/ch04/proxy/basbakan3/Basbakan.java
package org.javaturk.cc.ch04.proxy.basbakan3;
/**
 * Proxy example, version 3: the prime minister as an interface so the real
 * subject and its proxy (VekilBasbakan) are interchangeable.
 */
public interface Basbakan {
	public void dertDinle(String dert);
	public void isBul(String yakinim);
}
<file_sep>/java/src/ch03/dip/paper/solution/Writer.java
package org.javaturk.cc.ch03.dip.paper.solution;
/**
 * DIP solution: the output abstraction Copy depends on instead of a
 * concrete Printer.
 *
 * @author akin
 */
public interface Writer {
	/** Writes a single character to the underlying device. */
	public void write(char ch);

	/** Releases the underlying device. */
	public void close();
}
<file_sep>/java/src/ch03/lsp/designByContract/bad/Test.java
package org.javaturk.cc.ch03.lsp.designByContract.bad;
/**
 * Design-by-contract demo: the same add() test is run against Calculator
 * and then against the PositiveCalculator subtype with a negative operand,
 * to expose the subtype's strengthened precondition (run with -ea so the
 * asserts are active).
 */
public class Test {
	public static void main(String[] args) {
		Calculator calculator = new Calculator();
		testCalculator(calculator);

		System.out.println("***********");
		// Same test against the subtype -- presumably add(-3, 7) no longer
		// behaves like the supertype here; confirm against PositiveCalculator.
		calculator = new PositiveCalculator();
		testCalculator(calculator);

		System.out.println("***********");
		calculator = new PositiveCalculator();
		testPositiveCalculator(calculator);
	}

	// Generic contract check: add(i, j) must equal i + j.
	public static void testCalculator(Calculator calculator) {
		int i = -3;
		int j = 7;
		int expected = i + j;
		int result = calculator.add(i, j);
		assert expected == result : "Problem! Needs to be " + expected + " but found " + result;
	}

	/**
	 * Requires RTTI to work with a PositiveCalculator correctly!
	 *
	 * @param calculator calculator under test; only exercised when it is a PositiveCalculator
	 */
	public static void testPositiveCalculator(Calculator calculator) {
		PositiveCalculator positiveCalculator = null;
		if (calculator instanceof PositiveCalculator) {
			positiveCalculator = (PositiveCalculator) calculator;

			int i = -3;
			int j = 7;
			int expected = i + j;
			int result = calculator.add(i, j);
			assert expected == result : "Problem! Needs to be " + expected + " but found " + result;
		}
	}
}
<file_sep>/java/src/ch03/lsp/designByContract/taxi/Customer.java
package org.javaturk.cc.ch03.lsp.designByContract.taxi;
/**
 * Taxi customer with a name and a trip distance.
 */
public class Customer {
	private String name;
	private int distance;

	/**
	 * Bug fix: the original parameter was named "string" while the body did
	 * "this.name = name;" -- a self-assignment that left the field null.
	 *
	 * @param name     customer's name
	 * @param distance trip distance
	 */
	public Customer(String name, int distance) {
		this.name = name;
		this.distance = distance;
	}

	public int getDistance() {
		return distance;
	}

	public String getName() {
		return name;
	}

	/** Asks the given taxi to take this customer. */
	public void call(Taxi taxi){
		taxi.take(this);
	}

	/** Pays the given taxi. */
	public void pay(Taxi taxi){
		System.out.println("paying taxi " + taxi.getName());
	}
}
<file_sep>/java/src/ch02/cohesion/temperature/functional/TripleTemperatureConverter.java
package org.javaturk.cc.ch02.cohesion.temperature.functional;
import org.javaturk.cc.ch02.cohesion.temperature.functional.command.ConvertCommand;
/**
 * TemperatureConverter that handles every pairing of Celsius, Fahrenheit and
 * Kelvin by delegating each directed conversion to a dedicated ConvertCommand.
 *
 * NOTE(review): none of the command fields is initialised in the code visible
 * here; unless they are injected elsewhere, any cross-unit conversion would
 * throw a NullPointerException — verify the wiring.
 */
public class TripleTemperatureConverter implements TemperatureConverter {

	// One command object per directed conversion between the three units.
	private ConvertCommand c2FConvert;
	private ConvertCommand f2CConvert;
	private ConvertCommand c2KConvert;
	private ConvertCommand k2CConvert;
	private ConvertCommand k2FConvert;
	private ConvertCommand f2KConvert;

	/**
	 * Converts {@code degree} from {@code source} to {@code target} units.
	 * A same-unit request returns the input unchanged; an unmatched pairing
	 * falls through and returns 0.0.
	 */
	@Override
	public double convert(TemperatureType source, TemperatureType target, double degree) {
		double result = 0.0;
		if (source == TemperatureType.CELSIUS) {
			if (target == TemperatureType.FAHRENHEIT)
				result = c2FConvert.execute(degree);
			else if (target == TemperatureType.KELVIN)
				result = c2KConvert.execute(degree);
			else if (target == TemperatureType.CELSIUS)
				result = degree;
		} else if (source == TemperatureType.FAHRENHEIT) {
			if (target == TemperatureType.CELSIUS)
				result = f2CConvert.execute(degree);
			else if (target == TemperatureType.KELVIN)
				result = f2KConvert.execute(degree);
			else if (target == TemperatureType.FAHRENHEIT)
				result = degree;
		} else if (source == TemperatureType.KELVIN) {
			if (target == TemperatureType.FAHRENHEIT)
				result = k2FConvert.execute(degree);
			else if (target == TemperatureType.CELSIUS)
				result = k2CConvert.execute(degree);
			else if (target == TemperatureType.KELVIN)
				result = degree;
		}
		return result;
	}
}
<file_sep>/java/src/ch02/cohesion/address/Street.java
package org.javaturk.cc.ch02.cohesion.address;
public class Street {
private String name;
}
<file_sep>/java/src/ch03/srp/customerService/solution/ex/WrongCustomerCredentialsException.java
package org.javaturk.cc.ch03.srp.customerService.solution.ex;
public class WrongCustomerCredentialsException extends Exception {
public WrongCustomerCredentialsException(String message){
super(message);
}
}
<file_sep>/java/src/ch05/assertion/SqrtCalculator.java
package org.javaturk.cc.ch05.assertion;
/**
 * Demonstrates three styles of guarding a square-root computation:
 * no guard, an exception, and an assertion.
 */
public class SqrtCalculator {

	/** Runs the assertion-based variant on a few sample inputs. */
	public static void main(String[] args) {
		SqrtCalculator calculator = new SqrtCalculator();
		for (int value : new int[] { 625, 12, 17 })
			System.out.println(calculator.sqrt3(value));
	}

	/** Plain delegation to Math.sqrt; a negative input yields NaN. */
	public double sqrt1(int x) {
		return Math.sqrt(x);
	}

	/** Rejects negative input with an exception before delegating. */
	public double sqrt2(int x) {
		if (x < 0)
			throw new IllegalArgumentException("Negative number!" + x); //
		return Math.sqrt(x);
	}

	/** Rejects negative input with an assert (active only with -ea). */
	public double sqrt3(int x) {
		assert x >= 0 : "Negative number: " + x;
		// Debug.assert(x >= 0, "Negative number: " + x);
		return Math.sqrt(x);
	}
}
<file_sep>/java/src/ch04/proxy/basbakan3/Test.java
package org.javaturk.cc.ch04.proxy.basbakan3;
public class Test {
public static void main(String[] args) {
System.err.println("*** Vatandaş başbakanlık kalemine gelir ve süreç başlar. ***");
Basbakan basbakan = new GercekBasbakan();
BasbakanlikKalemi kalem = new BasbakanlikKalemi(basbakan);
Vatandas riza = new Vatandas(kalem);
riza.derdiniAnlat();
riza.isIste();
}
}
<file_sep>/java/src/ch02/cohesion/temperature/functional/command/C2FConvert.java
package org.javaturk.cc.ch02.cohesion.temperature.functional.command;
public class C2FConvert implements ConvertCommand{
@Override
public double execute(double degree) {
double result = (degree * 9 / 5) + 32;
return result;
}
}<file_sep>/java/src/ch03/srp/paper/comm2/DataChannel.java
package org.javaturk.cc.ch03.srp.paper.comm2;
/** A character-oriented, bidirectional communication channel. */
public interface DataChannel {

	/** Sends a single character over the channel. */
	public void send(char c);

	/** Receives and returns the next character from the channel. */
	public char recv();
}
<file_sep>/java/src/ch03/srp/domain/TransactionType.java
package org.javaturk.cc.ch03.srp.domain;
/** Kinds of banking transactions supported by the domain model. */
public enum TransactionType {
	TRANSFER,
	DEPOSIT,
	WITHDRAW,
	EFT,
	PAYMENT
}
<file_sep>/java/src/ch03/srp/customerService/solution/service/LoginServiceI.java
package org.javaturk.cc.ch03.srp.customerService.solution.service;
import org.javaturk.cc.ch03.srp.domain.Customer;
import org.javaturk.cc.ch03.srp.customerService.solution.ex.*;
public interface LoginServiceI {
/**
* Logs in the customer.
* @author akin
* @param tckn Tckn of the customer
* @param password <PASSWORD> of the customer
* @throws NoSuchCustomerException Throws when no customer with given tckn found
* @throws CustomerAlreadyLoggedException Throws when the customer has already been logged in
* @throws WrongCustomerCredentialsException Throws when the customer's provided apssword does not match the existing one
* @throws MaxNumberOfFailedLoggingAttemptExceededException Throws when the number of the customer's attemps to log in with wrong passwords exceeds a certain amount.
* @throws CustomerLockedException Throws when no customer is locked
* @throws NoProperPasswordException
* @throws ValidationException
*/
public void login(String tckn, String password) throws NoSuchCustomerException, CustomerAlreadyLoggedException, WrongCustomerCredentialsException,
MaxNumberOfFailedLoggingAttemptExceededException, CustomerLockedException, NoProperPasswordException, ValidationException;
/**
* Method to logout the customer.
* @author akin
* @param customer Customer to log out.
* @return boolean True when the customer successfully logs out.
*/
public boolean logout(Customer customer);
}
<file_sep>/java/src/ch03/dip/paper/problem/Printer.java
package org.javaturk.cc.ch03.dip.paper.problem;
import java.io.PrintStream;
public class Printer {
private PrintStream out = System.out;
public void write(char ch){
out.print(ch);
}
}
<file_sep>/java/src/ch03/srp/task/SorterTaskFactory.java
package org.javaturk.cc.ch03.srp.task;
public class SorterTaskFactory {
public Sorter<Product> create() {
return new SorterTask();
}
}
<file_sep>/java/src/ch03/isp/log/good/DBLogger.java
package org.javaturk.cc.ch03.isp.log.good;
import java.io.File;
/**
 * Logger specialisation for database-backed logging; adds connection
 * lifecycle management on top of the basic {@code Logger.log} operation,
 * so clients that only log never see connection concerns (ISP).
 */
public interface DBLogger extends Logger {

	/** Opens the database connection used for subsequent log writes. */
	void openConnection();

	/** Closes the previously opened database connection. */
	void closeConnection();
}
<file_sep>/java/src/ch03/srp/customerService/solution/ex/CustomerAlreadyLoggedException.java
package org.javaturk.cc.ch03.srp.customerService.solution.ex;
public class CustomerAlreadyLoggedException extends Exception {
public CustomerAlreadyLoggedException(String message) {
super(message);
}
}
<file_sep>/java/src/ch03/isp/paper/security/TimerClient.java
package org.javaturk.cc.ch03.isp.paper.security;
public interface TimerClient {
public void timeout() ;
}
|
e0997b55ad699a9eca6dcec12715088f9990ef5c
|
[
"Java"
] | 57 |
Java
|
musaogural/CleanCode
|
9ffb1ab4501928029eaad80d264e33ac080f0a4c
|
579450a0bac50b507a660dd3c54cc99127079aa3
|
refs/heads/master
|
<repo_name>Wisnia144/Ch15_TVRemote<file_sep>/Friends/Friends/ClassTV.h
#ifndef _TV_H
#define _TV_H
class Tv
{
public:
friend class RemoteX;
Tv():state(Off),volume(10),maxChannel(50),channel(1){}
enum {Off,On};
enum {MinVal,MaxVal = 20};
enum {TV,DVD};
bool onoff(void) ;
void volUp(void) {volume++;}
void volDown(void) {volume--;}
void chUp(void){channel++;}
void chDown(void){channel--;}
private:
int state;
int volume;
int maxChannel;
int channel;
};
#endif<file_sep>/Friends/Friends/ClassRemote.cpp
#include <iostream>
#include "ClassRemote.h"
#include "ClassTV.h"
/*
Remote():mode(1) {};
void OnOff(void);
void volUp(void);
void volDown(void);
void chUp(void);
void chDown(void);
*/
void RemoteX::OnOff(Tv & t)
{
std::cout << t.onoff();
}
void RemoteX::volUp(Tv & t)
{
t.volume++;
std::cout << "volUp:" << t.volume << "\n";
}
void RemoteX::volDown(Tv & t)
{
t.volume--;
std::cout << "volDown:" << t.volume << "\n";
}
void RemoteX::chUp(Tv & t)
{
t.channel++;
std::cout << "chUp:" << t.channel << "\n";
}
void RemoteX::chDown(Tv & t)
{
t.channel--;
std::cout << "chwn:" << t.channel << "\n";
}
<file_sep>/Friends/Friends/MainHeader.h
#include <iostream>
#include "conio.h"
#include "ClassTV.h"
#include "ClassRemote.h"<file_sep>/Friends/Friends/ClassRemote.h
#ifndef _RemoteX_H
#define _RemoteX_H
class RemoteX
{
public:
friend class Tv;
enum Modes {NORMAL,INTERACTIVE};
RemoteX(Modes inMode = NORMAL):mode(1),modeWork(inMode) {};
void OnOff(Tv & t);
void volUp(Tv & t);
void volDown(Tv & t);
void chUp(Tv & t);
void chDown(Tv & t);
void SetMode(Modes mMode);
private:
int mode;//tv or dvd
Modes modeWork;
};
#endif<file_sep>/Friends/Friends/Source.cpp
#include "MainHeader.h"
using std::cout;
// Console front-end that owns a Tv and drives it through a RemoteX,
// translating keystrokes from _getch() into remote-control calls.
class RemoteManager
{
public:
	// Stores a copy of the remote; the managed Tv member is
	// default-constructed.
	RemoteManager(RemoteX x)
	{
		eRemote = x;
		cout << "constr\n";
	};
	// Prints the key bindings; the numbers match the case labels in
	// switchCases below (second byte of the codes _getch() yields for
	// the arrow keys).
	void helpDisplay(void)
	{
		cout << "Up #72 (VolUp)\n";
		cout << "Down #80 (VolDown)\n";
		cout << "Right #77 (ChUp)\n";
		cout << "Left #75 (ChDown)\n";
	};
	// Dispatches one keystroke: arrows adjust volume/channel on the owned
	// Tv, 'h' reprints the help; anything else is ignored.
	void switchCases(const char c)
	{
		switch (c){
		case 80:
			eRemote.volDown(eTv);
			break;
		case 72:
			eRemote.volUp(eTv);
			break;
		case 77:
			eRemote.chUp(eTv);
			break;
		case 75:
			eRemote.chDown(eTv);
			break;
		case 'h':
			helpDisplay();
			break;
		};
	};
private:
	RemoteX eRemote;	// copy of the remote passed to the constructor
	Tv eTv;				// the television being controlled
};
int main()
{
std::cout << "\n============\n";
Tv eTv;
RemoteX eRem;
RemoteManager rMan(eRem);
eRem.chDown(eTv);
eRem.volUp(eTv);
char c = 'h';
do{
if(c > 0)
cout << static_cast<int>(c) <<"...(h dor help)\n";
c = _getch();
rMan.switchCases(c);
}while(c !='k' && c !=27);
return 0;
}<file_sep>/Friends/Friends/ClassTV.cpp
#include "ClassTV.h"
// Toggles the power state and reports whether the set is now on.
// Bug fix: the original ternary was `(state == Off) ? Off : On`, which maps
// Off -> Off and On -> On — the state never changed. A power button must
// flip it.
bool Tv::onoff()
{
	state = (state == Off) ? On : Off;
	return state == On;
}
|
61c1b56f814f9cbdd7f8427cfdd10639017ad167
|
[
"C++"
] | 6 |
C++
|
Wisnia144/Ch15_TVRemote
|
b8bddf3f796afb7990826bfbe7f86c745b427fc1
|
f12e23da510d1a6b8ef71c5da6481dfffb8e49f2
|
refs/heads/master
|
<file_sep>from graphics import*
from button import*
from Computing_Value import*
from Judge_Winner import*
import random
def main():
board = [] # Define a chessboard without any piece.
row = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] # Use 0, 1, 2 indict 'no piece', 'black piece' and 'white piece'
for i in range(15):
board.append(row[:])
win = GraphWin("Chess Game", 800, 640) # Initilising the version window.
win.setCoords(-1, -1, 19, 15) # We set the coords like this to make the coordinates of our board can be represented by the number berteen 0 and 14.
R1 = Rectangle(Point(-0.5, -0.5), Point(14.5, 14.5))
R1.setFill('yellow')
R1.setOutline('yellow')
R1.draw(win)
for i in range(15):
Line(Point(0, i), Point(14, i)).draw(win)
Line(Point(i, 0), Point(i, 14)).draw(win)
# Initilising the buttons.
gButton = Button(win, Point(17, 13), 2, 1, "Give up")
rButton = Button(win, Point(17, 9), 2, 1, "Regret")
reButton = Button(win, Point(17, 5), 2, 1, "Restart")
qButton = Button(win, Point(17, 1), 2, 1, 'Quit')
gButton.activate()
rButton.activate()
reButton.activate()
qButton.activate()
stepx, stepy = [], [] # To record the squence of the coordinates of player's pieces.
stepxc, stepyc = [], [] # To record the squence of the coordinates of computer's pieces.
bpsquence = [] # A list to contain the instances of balck pieces
wpsquence = [] # A list to contain the instances of white pieces
Quit = 0 # An indicator to show whether the button 'quit' has been clicked.
Beaten = 0 # An indicator to show whether the player has been beaten.
Won = 0 # An indicator to show whether the player has gained the victory.
TWon = Text(Point(7, 14.5), "Congratulations! You've won the game.")
TWon.setSize(20)
T = Text(Point(7, 14.5), "Unfortunately, you've given up.")
T.setSize(20)
Tbeaten = Text(Point(7, 14.5), "Unfortunately, you've beaten.")
Tbeaten.setSize(20)
while True:
if Quit:
win.close()
break
q = win.getMouse()
if gButton.clicked(q):
T.draw(win)
break
''' If the button 'regret' is clicked, we should wipe the record of the latest steps of player the computer.
What's more, we also need to undraw the pieces and wipe the latest recorded instances of pieces.'''
if rButton.clicked(q):
if len(stepx) != 0:
stepx.pop()
stepy.pop()
bpsquence[-1].undraw()
board[bpsquence[-1].x][bpsquence[-1].y] = 0
bpsquence.pop()
stepxc.pop()
stepyc.pop()
wpsquence[-1].undraw()
board[wpsquence[-1].x][wpsquence[-1].y] = 0
wpsquence.pop()
continue
''' If the button 'restart' is clicked, we should wipe the all records of the latest steps of player the computer.
What's more, we also need to undraw the pieces and wipe the all recorded instances of pieces.'''
if reButton.clicked(q):
if Beaten: Tbeaten.undraw()
if Won: TWon.undraw()
for i in range(len(bpsquence)):
bpsquence[i].undraw()
for j in range(len(wpsquence)):
wpsquence[j].undraw()
stepx, stepy = [], []
stepxc, stepyc = [], []
board = [[0 for col in range(15)] for row in range(15)]
continue
if qButton.clicked(q):
Quit = 1
break
px = int(round(q.getX())) # To round the raw coordinate to the nearest integer.
py = int(round(q.getY()))
stepx.append(px)
stepy.append(py)
if (-1 < px < 15) and (-1 < py < 15) and (board[px][py] == 0):
''' If the given coordinate is contained by the chess board, we catch it and seem it as a legal step.
Then, we should record the instance and the coordinate in different squences and draw the piece on
the chess board. And then, if the player reach to the demand of victory, the game ends, or the game
comes to the computer's turn. '''
board[px][py] = 1
bpiece = chesspiece(px, py,win, 1)
bpsquence.append(bpiece)
bpiece.draw()
V1, M1, V2, M2 = Main_Computing(board)
Value_Max_Squence = []
Max_Value = 0
if Player_Won(board):
TWon.draw(win)
Won = 1
else:
for x in range(15):
for y in range(15):
if x == 0 and y == 0:
Value_Max_Squence.append([0, 0])
Max_Value = V1[x][y]
else:
if Max_Value < V1[x][y]:
Value_Max_Squence = [[x, y]]
Max_Value = V1[x][y]
if Max_Value == V1[x][y]:
Value_Max_Squence.append([x, y])
if Max_Value < V2[x][y]:
Value_Max_Squence = [[x, y]]
Max_Value = V2[x][y]
if Max_Value == V2[x][y]:
Value_Max_Squence.append([x, y])
if not (Computer_Won(board)):
'''If the computer has reached the demand of victory(five-chess-in-a-row),the games ends, or
the computer will choose an insersection depending on their values to determine its next step.'''
NextStep = random.sample(Value_Max_Squence, 1)
wx, wy = NextStep[0][0], NextStep[0][1]
board[wx][wy] = 2
stepxc.append(wx)
stepyc.append(wy)
wpiece = chesspiece(wx, wy, win, 2)
wpsquence.append(wpiece)
wpiece.draw()
else:
wx, wy = Computer_Won(board)
board[wx][wy] = 2
stepxc.append(wx)
stepyc.append(wy)
wpiece = chesspiece(wx, wy, win, 2)
wpsquence.append(wpiece)
wpiece.draw()
Tbeaten.draw(win)
Beaten = 1
win.getMouse()
win.close()
class chesspiece: # Define the class of pieces and the legal operation on its instance.
    """A single piece rendered as a filled circle on the game window."""
    def __init__(self,x,y,w, color):
        # color: 1 -> black (player), 2 -> white (computer), matching the
        # board encoding used in main(). Any other value would leave self.c
        # unset and the setFill call below would fail.
        if color == 1: self.c = 'black'
        if color == 2 : self.c = 'white'
        self.w = w              # the GraphWin to draw into
        self.point = (x,y)      # board coordinate kept as a tuple
        self.x = x              # ... and as separate components
        self.y = y
        self.piece = Circle(Point(self.x,self.y),0.5)
        self.piece.setFill(self.c)
        self.piece.setOutline(self.c)
    def draw(self):
        # Shows the piece on the stored window.
        self.piece.draw(self.w)
    def undraw(self):
        # Removes the piece from the window (used by regret/restart).
        self.piece.undraw()
main()
<file_sep># Chess-in-Five
It's a project to create an AI for the game Chess-in-Five. We'd like to realize it in two ways: an explicit algorithm and machine learning.
<file_sep># This is the module to judge if till a moment the player or the computer has won the chess game already.
import Computing_Value
def Player_Won(board):
    """Return 1 if the player (side 1, black) has five pieces in a row, else 0.

    With label=1, Basic_Value_Onedirection returns the raw count of same-side
    neighbours in a line through (x, y); 4 neighbours plus the piece at
    (x, y) itself makes five in a row.
    """
    for x in range(15):
        for y in range(15):
            if board[x][y] != 1:
                continue  # only the player's own pieces can anchor a winning line
            for k in range(4):
                # Removed a leftover Python-2 `print` statement that spammed
                # up to 900 lines per call (and broke Python 3), and the
                # resulting duplicate evaluation of the direction value.
                if Computing_Value.Basic_Value_Onedirection(x, y, k, 1, board, 1) == 4:
                    return 1
    return 0
def Computer_Won(board):
    """Return the (x, y) "winning point" for the computer, or 0 if none exists.

    A winning point is an empty intersection whose penalised max-line count
    for side 2 is 4, i.e. placing a white piece there completes five in a row.
    """
    # Perf fix: Main_Computing scans the whole board and does not depend on
    # (x, y), so compute it once instead of once per intersection (225 times).
    _, _, _, M2 = Computing_Value.Main_Computing(board)
    for x in range(15):
        for y in range(15):
            if board[x][y] == 0 and M2[x][y] == 4:
                return x, y
    return 0
<file_sep># This is the module to computer the 'Value' of a certain intersection of the board.
# The 'Value' suggests the rank point of the intersection in term of the possibility of winning the game
def Edge_ValueLoss(x, y, side, board):
    """Penalty for coordinates close to the board edge.

    Pieces near an edge are worth less, so for each of the four offsets
    around (x, y) that touches an edge (row/column 0 or 14) a geometrically
    decaying weight (1/2)**offset is subtracted. ``side`` and ``board`` are
    accepted for interface symmetry but not used.
    """
    penalty = 0.0
    for offset in range(4):
        weight = 0.5 ** offset
        if (x + offset) in (0, 14) or (x - offset) in (0, 14):
            penalty -= weight
        if (y + offset) in (0, 14) or (y - offset) in (0, 14):
            penalty -= weight
    return penalty
def Basic_Value_Onedirection(x,y,direction,side,board,label):
''' The 'direction' can be 0,1,2,3 which present right-left, up-down, ul-lr and ur-ll.
And the side represent the player by the value 1 and the computer by the value 2.
The 'label' can be 1 or 0, to return the 'raw Maxline' or the 'penaltied dMaxline' '''
if side == 1: enemy = 2
if side == 2: enemy = 1
Value = 0
Max_Inline = 0
if direction == 0: dx, dy = 1, 0
if direction == 1: dx, dy = 0, 1
if direction == 2: dx, dy = 1, -1
if direction == 3: dx, dy = 1, 1
i =1
# We take advantage of a loop to calculate the num of pieces in a row in four lines and eight directions.
while (-1 < x + i * dx < 15) and (-1 < y + i * dy < 15): # To judge if the point is still inside the chess board.
if (board[x + i * dx][y + i * dy] == side): # If the next coordinate is taken in by the 'side''s own piece, we continute the loop.
Value += 1
Max_Inline += 1
if (board[x + i * dx][y + i * dy] == enemy):
''' If the next coordinate is taken in by the 'enemy', the loop stops. Otherwise, we give a 'penality' to
the value equalling 1.'''
Value -= 1
break
if (board[x + i * dx][y + i * dy] == 0): # If there is no piece on the next coordinate, the loop stops.
break
i += 1
Max_Linesimple = Max_Inline # Represent the number of pieces in a row toward one direction among a total number of eight.
i = 1
while (-1 < x + (-i) * dx < 15) and (-1 < y + (-i) * dy < 15): # We begin a loop for searching in the same the line by on opposite direction.
if (board[x + i * (-dx)][y + i * (-dy)] == side):
Value += 1
Max_Inline += 1
if (board[x + i * (-dx)][y + i * (-dy)] == enemy):
Value -= 1
break
if (board[x + i * (-dx)][y + i * (-dy)] == 0):
break
i += 1
if label == 0:
if board[x][y] == 0:
if ((Max_Linesimple > 2) and (Max_Inline > 3)) or ((Max_Linesimple > 0) and (Max_Inline > 3)):
return Value+2, 4
if ((Max_Linesimple > 1) and (Max_Inline > 3)) :
return Value+2, 4
if (Max_Linesimple > 0)and(Max_Inline > 2):
return Value+1,Max_Inline
else: return Value, Max_Inline
else: return 0, 0
if label == 1:
return Max_Inline
def Basic_Value(x, y, side, board):
    """Total basic value of (x, y) for ``side`` across all four directions.

    Sums the per-direction values from Basic_Value_Onedirection (label=0),
    applies the edge penalty, and also reports the largest penalised
    pieces-in-a-row count among the four directions.
    """
    values, max_lines = [], []
    for direction in range(4):
        value, max_line = Basic_Value_Onedirection(x, y, direction, side, board, 0)
        values.append(value)
        max_lines.append(max_line)
    total = sum(values) + Edge_ValueLoss(x, y, side, board)
    return total, max(max_lines)
def Extra_Value(x, y, side, board):
    """Extra score for special patterns around (x, y) such as 3-3, 4-3, 4-4.

    Each bonus reflects how strongly the pattern tends to force a final
    victory; the per-direction penalised values and max-in-line counts come
    from Basic_Value_Onedirection with label=0.
    """
    extra = 0
    direction_values = [0, 0, 0, 0]
    direction_max_lines = [0, 0, 0, 0]
    total_value = 0
    for i in range(4):
        direction_values[i], direction_max_lines[i] = Basic_Value_Onedirection(x, y, i, side, board, 0)
        # Bug fix: the original accumulated index [-1] (the not-yet-filled
        # last slot) on every pass, so total_value only ever counted the
        # fourth direction; accumulate the direction just computed instead.
        total_value += direction_values[i]
    # Bonuses for open threes, double threats, fours and a high overall value.
    if 3 in direction_values: extra += 1.5
    if (3 in direction_values) and (2 in direction_values): extra += 5
    if 4 in direction_values: extra += 5
    if total_value > 6: extra += 2.5
    # A penalised in-line count of 3 (or 4) in a single direction foresees an
    # imminent five-in-a-row, hence the dominant weights below.
    if 3 in direction_max_lines: extra += 10
    if 4 in direction_max_lines: extra += 12
    return extra
def Main_Computing(board):
    """Compute value and max-line tables for both sides over the whole board.

    Returns (Value_Table1, MaxLine1, Value_Table2, MaxLine2): the value
    tables combine each intersection's basic value (surrounding pieces,
    edge-penalised) with its extra value for special patterns; the max-line
    tables hold the penalised pieces-in-a-row counts. Side 1 is the player
    (black), side 2 the computer (white), matching the board encoding.
    """
    Value_Table1 = []
    MaxLine1 = []
    Value_Table2 = []
    MaxLine2 = []
    row = [0]*15
    for m in range(15):
        # row[:] copies, so the four tables do not share row objects.
        Value_Table1.append(row[:])
        Value_Table2.append(row[:])
        MaxLine1.append(row[:])
        MaxLine2.append(row[:])
    for x in range(15):
        for y in range(15):
            BasicValue1, maxline1 = Basic_Value(x, y, 1, board)
            ExtraValue1 = Extra_Value(x, y, 1, board)
            Value_Table1[x][y] = BasicValue1 + ExtraValue1
            MaxLine1[x][y] = maxline1
            BasicValue2, maxline2 = Basic_Value(x, y, 2, board)
            ExtraValue2 = Extra_Value(x, y, 2, board)
            Value_Table2[x][y] = BasicValue2 + ExtraValue2
            MaxLine2[x][y] = maxline2
    return Value_Table1, MaxLine1, Value_Table2, MaxLine2
|
01a467cc69fbcf8dac77b3955a984b10b7b185c8
|
[
"Markdown",
"Python"
] | 4 |
Python
|
noahcao/Chess-in-Five
|
0cb416132060b0f84e949826babf76261341e0e4
|
482da62a719f1542a3fc09c940394e6fab2f6d42
|
refs/heads/master
|
<repo_name>dbgroup-uestc/JiangHuaqi<file_sep>/code/NeuralMemory_Adversarial_CODE/MN_GAN.py
import model_GAN
import torch
from torch.autograd import Variable
import torch.optim as optim
from sampler import WARPSampler
from dataloader import movielens
import numpy as np
from tqdm import tqdm
import pandas as pd
import pdb
import time
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"
if __name__ == '__main__':
dim = 64
memory_size = 128
# 1000
BATCH_SIZE = 1000
# 200
margin = 4
use_rank_weight = True
lr = 0.2
user_negs_n = 5000
n_candiidates = 50
n_negative = 10
topk = 30
"""
pre training
"""
train1_pd, test1_pd, test2_pd, test3_pd, test4_pd, most_popular_items, n_users, n_items = movielens(
'datasets/ml/ratings.csv')
n_users = int(n_users)
n_items = int(n_items)
networkD = model_GAN.Dis(dim = dim, n_users = n_users, n_items = n_items, memory_size = memory_size)
networkG = model_GAN.Gen(dim = dim, n_users = n_users, n_items = n_items)
networkD = networkD.cuda()
networkG = networkG.cuda()
optimizerD = optim.Adagrad(networkD.parameters(), lr = lr)
optimizerG = optim.Adagrad(networkG.parameters(), lr = lr)
# pretrain
sampler = WARPSampler(train1_pd, most_popular_items, n_items, batch_size = BATCH_SIZE,
n_candiidates = n_negative,
check_negative = True)
for user_pos, neg_cands in tqdm(sampler.next_batch(), desc = 'pre training',
total = train1_pd.shape[0] / BATCH_SIZE):
"""
Pre-training
"""
networkD.zero_grad()
pos_users = user_pos[:, 0].astype(int)
pos_items = user_pos[:, 1].astype(int)
pos_users = Variable(torch.from_numpy(pos_users)).cuda()
pos_items = Variable(torch.from_numpy(pos_items)).cuda()
neg_cands = Variable(torch.from_numpy(neg_cands)).cuda()
d_loss = networkD(pos_users, pos_items, neg_cands, use_rank_weight, margin)
d_loss.backward()
optimizerD.step()
networkG.zero_grad()
D_G_p, D_G_n, _, _, _ = networkG(pos_users, pos_items, neg_cands, n_negative)
probs = D_G_p / torch.sum(D_G_n, dim = 1, keepdim = True)
g_loss = torch.sum(probs)
g_loss.backward()
optimizerG.step()
"""
Adversarial and streming updating
"""
test_pds = [test1_pd, test2_pd, test3_pd, test4_pd]
# test_pds = [test1_pd]
train_pd = train1_pd
previous_test_pd = train1_pd
for test_part, test_pd in enumerate(test_pds):
user_to_train_set = dict()
user_to_test_set = dict()
train_users = train_pd['user'].values.tolist()
train_items = train_pd['item'].values.tolist()
all_users_in_train = set(train_users)
all_items_in_train = set(train_items)
for t in train_pd.itertuples():
user_to_train_set.setdefault(t.user, set())
user_to_train_set[t.user].add(t.item)
for t in test_pd.itertuples():
user_to_test_set.setdefault(t.user, set())
user_to_test_set[t.user].add(t.item)
sampler = WARPSampler(previous_test_pd, most_popular_items, n_items, batch_size = BATCH_SIZE,
n_candiidates = n_candiidates,
check_negative = True)
epoch = 0
while epoch < 10:
epoch += 1
for user_pos, neg_cands in tqdm(sampler.next_batch(), desc = 'epoch:{}, training'.format(epoch),
total = previous_test_pd.shape[0] / BATCH_SIZE):
networkD.zero_grad()
pos_users = user_pos[:, 0].astype(int)
pos_items = user_pos[:, 1].astype(int)
pos_users = Variable(torch.from_numpy(pos_users)).cuda()
pos_items = Variable(torch.from_numpy(pos_items)).cuda()
neg_cands = Variable(torch.from_numpy(neg_cands)).cuda()
_, _, _, _, negs_index = networkG(pos_users, pos_items, neg_cands, n_negative)
# neg_item = torch.gather(neg_cands, dim = 1, index = neg_index)
neg_items = torch.gather(neg_cands, dim = 1, index = negs_index)
# train D
d_loss = networkD(pos_users, pos_items, neg_items.detach(), use_rank_weight, margin)
d_loss.backward()
optimizerD.step()
# train G
networkG.zero_grad()
_, _, probs, neg_index, _ = networkG(pos_users, pos_items, neg_cands, n_negative)
neg_item = torch.gather(neg_cands, dim = 1, index = neg_index)
log_prob = torch.gather(probs, dim = 1, index = neg_index).log()
D_D = networkD.get_D_D(pos_users, neg_item)
g_loss = torch.sum(log_prob * D_D)
g_loss.backward()
optimizerG.step()
"""
Testing
"""
user_negative_samples = dict()
items_set = set(list(range(1, n_items)))
for u in tqdm(user_to_train_set.keys(), desc = 'sampling user negative items'):
user_negative_samples[u] = np.random.choice(list(items_set - user_to_train_set[u]), user_negs_n)
accs = []
# all_items_embeddings = network.item_embeddings.weight
for test_u in tqdm(list(user_to_test_set.keys()), desc = 'testing'):
if test_u not in all_users_in_train:
continue
users_v = Variable(torch.from_numpy(np.array([test_u], dtype = int))).cuda()
# [1, D]
abst_prefers_embeds = networkD.abs_embed(users_v)
hit = 0
tot = 0
for test_v in user_to_test_set[test_u]:
if test_v not in all_items_in_train:
continue
candidate_items = np.append(user_negative_samples[test_u], test_v)
# [N, D]
candidate_items_embeddings = networkD.item_embeddings(
Variable(torch.from_numpy(candidate_items)).cuda())
item_scores = torch.sum((candidate_items_embeddings - abst_prefers_embeds) ** 2, dim = 1)
# item_scores = item_scores.cpu().data.numpy()
# user_tops = np.argpartition(item_scores, -topk)[-topk:]
_, user_tops = torch.topk(item_scores, k = topk, largest = False)
user_tops = user_tops.cpu().data.numpy()
tot += 1
if user_negs_n in user_tops:
hit += 1
if tot > 0:
accs.append(float(hit) / tot)
print('Final accuracy@{} on test {} : {}'.format(topk, np.mean(accs), test_part + 1))
previous_test_pd = test_pd
train_pd = pd.concat([train_pd, test_pd])
<file_sep>/code/GLF_CODE/POI2Vec.py
import logging
import re
import functools
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
from gensim import corpora, models, similarities
#读取数据
file_object = open("C:\\Users\\Client\\PycharmProjects\\JiangHuaqi\\code\\datasets\\poidata\\Foursquare\\train.txt","rU")
documents = []
try:
for line in file_object:
tempLine = re.split('\t|,',line.strip())
tempLine[0]=tempLine[0][5:]
tempLine[1]=tempLine[1][4:]
documents.append(tempLine)
finally:
file_object.close()
#形成每个用户的行程
USERS = {}
class CheckIn:
def __init__(self,pid,Lat,Lon,day,time):
self.pid = int(pid)
self.Lat = Lat
self.Lon = Lon
self.day = int(day)
self.time = time
def __repr__(self):
return repr((self.pid,self.Lat,self.Lon,self.day,self.time))
for record in documents:
if record[0] not in USERS.keys():
USERS[record[0]] = []
USERS[record[0]].append(CheckIn(record[1],record[2],record[3],record[5],record[4]))
#排序用户行程
for User in USERS:
temp = USERS[User]
temp=sorted(temp,key=lambda checkin:(checkin.day,checkin.time))
USERS[User] = temp
print("HI")
#寻找语句
'''Sentence = []
for user in USERS:
User = USERS[user]
for c in range(len(User)):
cword = User[c]
day = cword.day
[hour,second] = cword.time.split(":")
ctime = (int(day))*24+int(hour)+(int(second))/60
sentence = []
forward = 1
backward = 1
sentence.append(cword.pid)
for i in range(len(User)):
if forward:
if c+i+1 > (len(User))-1:
forward = 0
else:
oword = User[c+i+1]
day1 = oword.day
[hour1,second1] = oword.time.split(":")
otime = (int(day1))*24+int(hour1)+(int(second1))/60
if abs(ctime-otime) > 6:
forward = 0
else:
sentence.append(oword.pid)
if backward:
if c-i-1 < 0:
backward = 0
else:
oword = User[c-i-1]
day1 = oword.day
[hour1,second1] = oword.time.split(":")
otime = (int(day1))*24+int(hour1)+(int(second1))/60
if abs(ctime-otime) > 6:
backward = 0
else:
sentence.insert(0,oword.pid)
if backward==0 and forward==0:
if len(sentence) > 1:
if sentence not in Sentence:
Sentence.append(sentence)
break
'''
for User in USERS:
sentence = ""
for visit in USERS[User]:
if visit != len(USERS[User])-1:
sentence += str(visit.pid) + " "
else:
sentence += str(visit.pid)
print("Hi")<file_sep>/paper/GLF_PAPER/tinking_of_code.md
# 编程思路 #
数据是三元组,每条记录是USER_1083 LOC_1500 1.444 103.768 06:57 0 分别对应用户ID、POI的ID、纬度、经度、时间、第几天。
(已实现)通过code中的POICount.py我们将从里面提取若干信息:
1. 最基础的关于POI的频率字典,形式如下:{“1500”:50},这个频率字典用于后面Huffman构建。
2. POI查阅字典,作用是通过每个POI的ID,查阅其经纬度。这个是在构建最基础的二叉树的时候通过经纬度来判断位置,从而分区。
3. 用户行程字典,包含了每个用户经历过的POI,因为作者把每个用户的行程作为一个句子,所以我们行程字典内容如下:{“1083”:[{pid:...,Lat:...,day:...,time:...},{pid:...,Lat:...,day:...,time:...},...]}.这个字典用于上下文的语句的提出
(未完全实现)通过code的BinaryTree.py实现:
不同于word2vec的CBOW的简单的Huffman tree,作者为了加入地理信息,从而改造了Huffman Tree的构建,实际上就是在给POI建立Huffman Tree之前,首先根据地理信息,将POI分成多个区域,同时又根据POI影响原则,将POI赋予给在规定影响范围内的区域但是POI又不在的区域,但是存在概率问题。这意味着在最后Huffman Tree中有多条通往一个POI的路径,这就涉及概率问题,所以在POI信息,在set集合中的POI需要添加概率信息进去。其中分区域的方法就是二分法,所以形成了一个Binary Tree.
1. 建立一个Tree Node类,里面包含了左右节点信息,和值(叶节点存储的是单词本身,非叶子节点存储中间向量),frequency(用于存储Huffman Tree的构建中节点频率),region(用于基础二叉树建立,包含区域大小、POI的集合、四条经纬度的值)、Huffman Code(Huffman编码用于后面概率计算)
2. 建立一个二叉树...
3. 在二叉树下建立一个Huffman Tree(代码已有)
(未实现)
<file_sep>/code/TensorFlow/Test.py
import tensorflow as tf
from tensorflow.python.framework import graph_util
v1 = tf.Variable(tf.constant(1.0,shape=[1]),name="v1")
v2 = tf.Variable(tf.constant(2.0,shape=[1]),name="v2")
result = v1+v2
saver = tf.train.Saver()
saver.export_meta_graph("./path/to/model/model.ckpt.json",as_text=True)<file_sep>/venv/Lib/site-packages/grpc/_grpcio_metadata.py
__version__ = """1.14.1"""<file_sep>/code/GLF_CODE/POICount.py
import re
import math
from collections import Counter
class POICounter():
    """Parse raw check-in records and count POI visit frequencies.

    Each input line must look like:
        USER_1083<TAB>LOC_1500<TAB>1.4447,103.7681<TAB>06:57<TAB>0
    i.e. user id, POI id, "lat,lon", check-in time (hour:min) and a date id,
    separated by tabs (the coordinate pair itself is comma separated).

    After construction the following attributes are populated:
        count_res -- collections.Counter mapping POI id (str) -> visit count
        user_dict -- {user_id(str): [CheckIn, ...]} sorted by (day, time)
        poi_dict  -- {poi_id(int): {"lat": float, "lon": float, "frequence": int}}
    """

    def __init__(self,poi_list):
        self.poi_list = poi_list
        self.count_res = None
        self.user_dict = None
        self.poi_dict = {}
        self.POI_Count(self.poi_list)

    def POI_Count(self,poi_list):
        """Build user_dict, poi_dict and count_res from the raw records."""
        documents = []
        for line in poi_list:
            # Split on tabs and on the comma inside the coordinate pair.
            tempLine = re.split('\t|,', line.strip())
            tempLine[0] = tempLine[0][5:]  # strip the "USER_" prefix
            tempLine[1] = tempLine[1][4:]  # strip the "LOC_" prefix
            documents.append(tempLine)

        # Per-user check-in history keyed by user id.
        USERS = {}

        class CheckIn:
            """One check-in event: POI id, coordinates, date id and time."""

            def __init__(self,pid,Lat,Lon,day,time):
                self.pid = int(pid)
                self.Lat = float(Lat)
                self.Lon = float(Lon)
                self.day = int(day)
                self.time = time

            def __repr__(self):
                return repr((self.pid,self.Lat,self.Lon,self.day,self.time))

        for record in documents:
            # record layout after split: [user, poi, lat, lon, time, date_id]
            if record[0] not in USERS.keys():
                USERS[record[0]] = []
            USERS[record[0]].append(CheckIn(record[1],record[2],record[3],record[5],record[4]))
            self.poi_dict[int(record[1])] = {"lat":float(record[2]),"lon":float(record[3])}

        # Sort every user's history chronologically by (date id, time string).
        for User in USERS:
            USERS[User] = sorted(USERS[User],key=lambda checkin:(checkin.day,checkin.time))
        self.user_dict = USERS

        # Turn each user's visit sequence into a space-separated "sentence".
        # BUGFIX: the original compared a CheckIn object against an index
        # (always True), which appended a trailing separator to every
        # sentence; joining avoids that and the resulting empty token.
        filter_poi_list = []
        for User in USERS:
            filter_poi_list.append(" ".join(str(visit.pid) for visit in USERS[User]))

        c = Counter()
        for sentence in filter_poi_list:
            c.update(sentence.split(" "))
        # Defensive: drop the empty token a blank sentence would produce.
        if '' in c:
            del c['']
        self.count_res = c
        for poi in c.keys():
            self.poi_dict[int(poi)]["frequence"] = c[poi]
# Module self-test: parse the Foursquare training file and print the counts.
if __name__ == '__main__': # idiom: this branch runs only when the file is executed directly, not on import
    # NOTE(review): mode "rU" is deprecated in Python 3 -- plain "r" behaves the same.
    file_object = open("C:\\Users\\Client\\PycharmProjects\\JiangHuaqi\\code\\datasets\\poidata\\Foursquare\\train.txt","rU")
    pc = POICounter(file_object)
    print(pc.count_res)
<file_sep>/code/GLF_CODE/xx.py
# -*- coding: utf-8 -*-
"""
Created on Apr 28 2016
Extracting vocabulary from Youdao dictionary
The vocabulary text file should be code as utf-8
<INPUT>
file_in: the exported vocabulary from Youdao
</INPUT>
<OUTPUT>
file_out: the file to save the English words. Default file name is
new_words_'time'.txt ('time' is the local date)
<OUTPUT>
@author: sinit
"""
import codecs,time
# Input: the vocabulary text exported from Youdao (must be UTF-8).
file_in = r'D:\voc.txt'
# Output file name carries the local date, e.g. new_words_2016-04-28.txt.
outname = 'new_words'+'_'+time.strftime("%Y-%m-%d",time.localtime())+".txt"
# NOTE(review): r'D:\\' is a raw string, so the path keeps a doubled
# backslash -- Windows tolerates it, but r'D:\' + name was likely intended.
file_out = r'D:\\'+outname
fs = codecs.open(file_in, 'r','utf-8')
vocabulary = fs.readlines()
fs.close()
word = []
# The first line is taken unconditionally: field 1 is the word itself.
word.append(vocabulary[0].split()[1])
def is_chinese(uchar):
    """Return True if the single character ``uchar`` is a CJK ideograph.

    Checks the basic CJK Unified Ideographs block U+4E00..U+9FA5.
    BUGFIX: the original compared against the literal strings '/u4e00' and
    '/u9fa5' (a typo for the ``\\u`` escapes), so it never matched any
    Chinese character.
    """
    return u'\u4e00' <= uchar <= u'\u9fa5'
def is_zh (c):
    """Return True if character ``c`` belongs to a CJK-related code block."""
    # Inclusive (start, end) code-point ranges, mirroring the original checks:
    # punct & radicals; fullwidth Latin; CJK Unified Ideographs (+ Ext A);
    # compatibility ideographs; Ext B; compatibility supplement.
    cjk_ranges = (
        (0x2e80, 0x33ff),
        (0xff00, 0xffef),
        (0x4e00, 0x9fbb),
        (0xf900, 0xfad9),
        (0x20000, 0x2a6d6),
        (0x2f800, 0x2fa1d),
    )
    cp = ord(c)
    return any(lo <= cp <= hi for lo, hi in cjk_ranges)
# Walk the remaining lines; lines whose first field is "<digits>." mark a
# new vocabulary entry, whose second field is the word itself.
for i in range(1,len(vocabulary)):
    line = vocabulary[i].split()
    # field 0 minus its trailing '.' must be a number (entry index)
    if vocabulary[i].split()[0][:-1].isdigit():
        newword = vocabulary[i].split()[1]
        # Skip entries whose "word" starts with a CJK character.
        if is_zh(newword[0]):
            continue
        else:
            word.append(vocabulary[i].split()[1])
# Write one English word per line to the dated output file.
fs = open(file_out, 'w+')
for line in word:
    fs.write(line)
    fs.write('\n')
fs.close()
print('Assignment Done!')
<file_sep>/code/NeuralMemory_Adversarial_CODE/dataloader.py
import numpy as np
from tqdm import tqdm
import pandas as pd
from sklearn.utils import shuffle
import pdb
def movielens(path, rating_thres = 1, seq_len = 1, split_ratio = (4, 2, 2, 2, 2)):
    """Load a MovieLens-style ratings CSV and split it chronologically.

    Keeps only ratings >= ``rating_thres`` from users with at least
    ``seq_len`` remaining ratings, then sorts by time and splits into one
    training and four test partitions according to ``split_ratio``.

    Returns (train_pd, test1_pd, test2_pd, test3_pd, test4_pd,
    most_popular_items, n_users, n_items), where most_popular_items is the
    list of the 500 most frequently rated item ids.
    """
    ratings = pd.read_csv(path, names = ['user', 'item', 'rating', 'time'], header = 0,
                          dtype = {'user': np.int32, 'item': np.int32, 'rating': np.float64, 'time': np.int32})
    # unique_users = np.sort(ratings['user'].unique())
    # unique_items = np.sort(ratings['item'].unique())
    # unique_times = np.sort(ratings['time'].unique())
    # print(ratings.shape)
    # print(len(unique_users))
    # print(len(unique_items))
    # ratings = ratings[ratings['rating'] < 3]
    # print(ratings.shape)
    #
    # user_remap = {}
    # for i, user in enumerate(unique_users):
    #     user_remap[user] = i
    # ratings['user'] = ratings['user'].map(lambda x : user_remap[x])
    # item_remap = {}
    # for i, item in enumerate(unique_items):
    #     item_remap[item] = i
    # ratings['item'] = ratings['item'].map(lambda x : item_remap[x])
    # Keep only sufficiently positive ratings ...
    ratings = ratings[ratings['rating'] >= rating_thres]
    # ... from users who still have at least seq_len ratings left.
    user_cnt = ratings['user'].value_counts().to_dict()
    ratings = ratings[ratings['user'].map(lambda x: user_cnt[x] >= seq_len)]
    most_popular_items = ratings.groupby('item').size().sort_values(ascending = False)[:500].index.values.tolist()
    # Ids are assumed dense enough that max+1 bounds the id space.
    n_items = ratings['item'].max() + 1
    n_users = ratings['user'].max() + 1
    ratings = ratings.sort_values(by = ['time'])
    ratings_len = ratings.shape[0]
    train_len = int(ratings_len * split_ratio[0] / sum(split_ratio))
    test1_len = int(ratings_len * split_ratio[1] / sum(split_ratio))
    test2_len = int(ratings_len * split_ratio[2] / sum(split_ratio))
    test3_len = int(ratings_len * split_ratio[3] / sum(split_ratio))
    # test4_len = int(ratings_len * split_ratio[1] / sum(split_ratio))
    # NOTE(review): the last partition is simply the remainder after the
    # first four cumulative cut points; split_ratio[4] is never used directly.
    train_pd, test1_pd, test2_pd, test3_pd, test4_pd = np.split(ratings, [train_len, train_len + test1_len,
                                                                          train_len + test1_len + test2_len,
                                                                          train_len + test1_len + test2_len + test3_len])
    return train_pd, test1_pd, test2_pd, test3_pd, test4_pd, most_popular_items, n_users, n_items
# Smoke test: load the default MovieLens ratings file when run directly.
if __name__ == '__main__':
    train_pd, test1_pd, test2_pd, test3_pd, test4_pd, most_popular_items, n_users, n_items = movielens('datasets/ml/ratings.csv')
<file_sep>/code/NeuralMemory_Adversarial_CODE/model_GAN.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.parameter import Parameter
import pdb
class Dis(nn.Module):
    """Discriminator: a key-value memory network over user/item embeddings.

    A fixed-size external memory (key_matrix / value_matrix) is addressed by
    user embeddings.  forward() reads an "abstract item" vector from the
    memory, writes the current positive items back into it (erase/add), and
    returns a rank-weighted hinge ranking loss against sampled negatives.
    """
    def __init__(self, dim, n_users, n_items, memory_size = 64):
        super(Dis, self).__init__()
        self.memory_size = memory_size
        self.dim = dim
        self.n_users = n_users
        self.n_items = n_items
        # External memory: keys address slots, values hold slot contents.
        self.key_matrix = Parameter(torch.Tensor(memory_size, dim))
        self.value_matrix = Parameter(torch.Tensor(memory_size, dim))
        self.user_embeddings = nn.Embedding(n_users, dim, max_norm = 1)
        self.item_embeddings = nn.Embedding(n_items, dim, max_norm = 1)
        # Projections producing the erase / add signals for the write step.
        self.e_linear = nn.Linear(dim, dim)
        self.a_linear = nn.Linear(dim, dim)
        self.key_matrix.data.normal_(0, 1)
        self.value_matrix.data.normal_(0, 1)
    def forward(self, users, items, neg_items, use_rank_weight, margin):
        """Read/write the memory and return the hinge ranking loss.

        NOTE: mutates self.value_matrix.data (the memory write) as a side
        effect of every forward pass.
        """
        # [batch_size]
        # embed = [batch_size, dim}
        users_embed = self.user_embeddings(users)
        items_embed = self.item_embeddings(items)
        neg_items_embed = self.item_embeddings(neg_items)
        # attentionW -- addressing: softmax over negative squared distances
        # between the user embedding and each memory key.
        ex_users_embed = torch.unsqueeze(users_embed, 1)
        attention = torch.sum((ex_users_embed - self.key_matrix) ** 2, dim = 2)
        correlation_weight = F.softmax(-attention, dim = 1)
        # read: attention-weighted sum over memory values.
        # [1, memory size, dim]
        ex_value_matrx = torch.unsqueeze(self.value_matrix, dim = 0)
        # [batch size, memory size, 1]
        ex_correlation_weight = torch.unsqueeze(correlation_weight, dim = 2)
        abst_items_embed = torch.sum(ex_value_matrx * ex_correlation_weight, dim = 1)
        # write: erase-then-add update driven by the positive item embedding.
        # [batch size, dim]
        erase_vector = self.e_linear(items_embed)
        erase_signal = F.sigmoid(erase_vector)
        add_vector = self.a_linear(items_embed)
        add_signal = F.tanh(add_vector)
        # [batch size, 1, dim]
        ex_erase_signal = torch.unsqueeze(erase_signal, 1)
        # w_t(i) * e_t
        # # [batch size, 1, dim]
        # [batch size, memory size, 1]
        erase_mul = torch.mean(ex_erase_signal * ex_correlation_weight, dim = 0)
        erase = self.value_matrix * (1 - erase_mul)
        add_reshaped = add_signal.view(-1, 1, self.dim)
        add_mul = torch.mean(add_reshaped * ex_correlation_weight, dim = 0)
        self.value_matrix.data = (erase + add_mul).data
        # pos_distances = torch.sum((users_embed - items_embed) ** 2, dim = 1)
        # distance_to_neg_items = torch.sum((torch.unsqueeze(users_embed, 1) - neg_items_embed) ** 2, dim = 2)
        # closest_negative_item_distances = torch.min(distance_to_neg_items, dim = 1)[0]
        # Hinge loss: abstract item must be closer to the positive than to
        # the closest sampled negative by at least `margin`.
        pos_abst_distances = torch.sum((abst_items_embed - items_embed) ** 2, dim = 1)
        abst_distance_to_neg_items = torch.sum((torch.unsqueeze(abst_items_embed, 1) - neg_items_embed) ** 2, dim = 2)
        closest_abst_negative_item_distances = torch.min(abst_distance_to_neg_items, dim = 1)[0]
        loss_per_pair = torch.clamp(pos_abst_distances - closest_abst_negative_item_distances + margin, min = 0)
        # loss_per_pair = torch.clamp(
        #     pos_abst_distances - closest_abst_negative_item_distances + pos_distances - closest_negative_item_distances + margin,
        #     min = 0)
        if use_rank_weight:
            # indicator matrix for impostors (N x W)
            impostors = (torch.unsqueeze(pos_abst_distances, -1) - abst_distance_to_neg_items + margin) > 0
            # impostors = (torch.unsqueeze(pos_abst_distances, -1) - abst_distance_to_neg_items + torch.unsqueeze(pos_distances, -1) - distance_to_neg_items + margin) > 0
            rank = torch.mean(impostors.float(), dim = 1) * self.n_users
            loss_per_pair *= torch.log(rank + 1)
        loss = torch.mean(loss_per_pair)
        return loss
    def get_D_D(self, users, neg_item):
        """Squared distance between the memory read for `users` and a
        single (squeezed) negative item embedding.  Read-only: unlike
        forward(), this does not write to the memory.
        """
        # [batch_size]
        # embed = [batch_size, dim}
        users_embed = self.user_embeddings(users)
        neg_item_embed = self.item_embeddings(neg_item)
        neg_item_embed = torch.squeeze(neg_item_embed, 1)
        # attentionW
        ex_users_embed = torch.unsqueeze(users_embed, 1)
        attention = torch.sum((ex_users_embed - self.key_matrix) ** 2, dim = 2)
        correlation_weight = F.softmax(-attention, dim = 1)
        # read
        # [1, memory size, dim]
        ex_value_matrx = torch.unsqueeze(self.value_matrix, dim = 0)
        # [batch size, memory size, 1]
        ex_correlation_weight = torch.unsqueeze(correlation_weight, dim = 2)
        abst_item_embed = torch.sum(ex_value_matrx * ex_correlation_weight, dim = 1)
        D_D = torch.sum((abst_item_embed - neg_item_embed) ** 2, dim = 1)
        return D_D
    def abs_embed(self, users):
        """Return the memory read (the "abstract" representation) for users."""
        # [N, D]
        users_embed = self.user_embeddings(users)
        # attentionW
        #
        ex_users_embed = torch.unsqueeze(users_embed, 1)
        attention = torch.sum((ex_users_embed - self.key_matrix) ** 2, dim = 2)
        correlation_weight = F.softmax(-attention, dim = 1)
        # read
        # [1, memory size, dim]
        ex_value_matrx = torch.unsqueeze(self.value_matrix, dim = 0)
        # [batch size, memory size, 1]
        ex_correlation_weight = torch.unsqueeze(correlation_weight, dim = 2)
        read_content = torch.sum(ex_value_matrx * ex_correlation_weight, dim = 1)
        return read_content
class Gen(nn.Module):
    """Generator: scores negative candidates for a user and samples from them.

    Users and items are embedded (C, D), projected to a 128-d space, and
    scored with exp(-squared distance); candidates are then drawn with
    torch.multinomial over the normalized scores.
    """
    def __init__(self, dim, n_users, n_items):
        super(Gen, self).__init__()
        self.n_users = n_users
        self.n_items = n_items
        # NOTE(review): n1/n2 look like intended hidden sizes but are unused;
        # l1/l2 hard-code 128 outputs instead.
        self.n1 = 128
        self.n2 = 32
        self.C = nn.Embedding(n_users, dim, max_norm = 1)
        self.D = nn.Embedding(n_items, dim, max_norm = 1)
        self.l1 = nn.Linear(dim, 128)
        self.l2 = nn.Linear(dim, 128)
    def forward(self, user, pos_items, neg_cands, n_neg):
        """Return (D_G_p, D_G_n, probs, neg_index, negs_index).

        D_G_p / D_G_n are similarity scores for positives / candidates;
        probs are the per-user candidate probabilities; neg_index samples
        one candidate per user and negs_index samples n_neg (both are
        indices into neg_cands, drawn stochastically).
        """
        user_vector = self.C(user)
        user_vector = self.l1(user_vector)
        # items_vector = self.D(item)
        neg_cands_vector = self.D(neg_cands)
        neg_cands_vector = self.l2(neg_cands_vector)
        pos_items_vector = self.D(pos_items)
        pos_items_vector = self.l2(pos_items_vector)
        # pos_distances = torch.exp(-torch.sum((user_vector - items_vector) ** 2))
        # Similarity = exp(-squared Euclidean distance) in the projected space.
        D_G_p = torch.exp(-torch.sum((user_vector - pos_items_vector) ** 2, dim = 1))
        user_vector = torch.unsqueeze(user_vector, 1)
        D_G_n = torch.exp(-torch.sum((user_vector - neg_cands_vector) ** 2, dim = 2))
        probs = D_G_n / torch.sum(D_G_n, dim = 1, keepdim = True)
        # shape (batchsize, n_neg]
        # m = Categorical(probs)
        # neg_index = m.sample()
        # negs_index = m.sample_n(n_neg).t()
        # neg_index = torch.unsqueeze(neg_index, dim = 1)
        # Sampling without replacement from the candidate distribution.
        neg_index = torch.multinomial(probs, 1, False)
        negs_index = torch.multinomial(probs, n_neg, False)
        return D_G_p, D_G_n, probs, neg_index, negs_index
<file_sep>/paper/GLF_PAPER/数学原理.md
# 数学原理 #
## 梯度下降法 ##
见https://www.cnblogs.com/pinard/p/5970503.html
## 神经网络 ##
参考西瓜书神经网络
## sigmoid函数 ##
$sigmoid(x) = \dfrac{1}{1+e^{-x}}$
作用就是将定义域$(-\infty,+\infty)$映射到$(0,1)$之间,其中在0附近梯度很大,所以常用于需要输出为0或1的情况,往往用它来替换非连续的单位阶跃函数$y=\begin{cases}1,&x>0\\0.5,&x=0\\0,&x<0\end{cases}$.<file_sep>/code/TensorFlow/mnist_train.py
import os
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow.contrib as tc
import mnist_inference
# Training hyperparameters.
BATCH_SIZE = 100                  # examples per gradient step
LEARNING_RATE_BASE = 0.8          # initial learning rate (exponentially decayed)
LEARNING_RATE_DECAY = 0.99        # decay factor per epoch-worth of steps
REGULARAZION_RATE = 0.0001        # L2 regularization strength
TRANING_STEPS = 30000             # total number of training iterations
MOVING_AVERAGE_DECAY = 0.99       # decay for the variable moving averages
# Checkpoint output location.
MODEL_SAVE_PATH = "./path/to/model"
MODEL_NAME = "model.ckpt"
def train(mnist):
    """Train the MNIST classifier defined in mnist_inference.

    Builds the forward graph with L2 regularization, an exponentially
    decayed learning rate, a moving average over trainable variables, and
    periodically saves checkpoints to MODEL_SAVE_PATH.
    """
    x = tf.placeholder(tf.float32,[None,mnist_inference.INPUT_NODE],name="x-input")
    y_ = tf.placeholder(tf.float32,[None,mnist_inference.OUTPUT_NODE],name="y-input")
    # inference() adds each layer's L2 penalty to the "losses" collection.
    regulizer = tc.layers.l2_regularizer(REGULARAZION_RATE)
    y = mnist_inference.inference(x,regulizer)
    global_step = tf.Variable(0,trainable=False)
    # Shadow copies of trainable variables for evaluation-time smoothing.
    variable_averages = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY,global_step)
    variable_averages_op = variable_averages.apply(tf.trainable_variables())
    cross_entropy_mean = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y,labels=tf.arg_max(y_,1)))
    # Total loss = data loss + L2 penalties gathered in the "losses" collection.
    loss = cross_entropy_mean + tf.add_n(tf.get_collection("losses"))
    learning_rate = tf.train.exponential_decay(LEARNING_RATE_BASE,global_step,mnist.train.num_examples / BATCH_SIZE,LEARNING_RATE_DECAY)
    # BUGFIX: optimize the full regularized loss; the original minimized only
    # cross_entropy_mean, which made the L2 regularization term dead code.
    train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss,global_step)
    # Group the gradient step with the moving-average update.
    with tf.control_dependencies([train_step,variable_averages_op]):
        train_op = tf.no_op(name="train")
    saver = tf.train.Saver()
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        for i in range(TRANING_STEPS):
            xs,ys = mnist.train.next_batch(BATCH_SIZE)
            _,loss_value,step,ty = sess.run([train_op,loss,global_step,y],feed_dict={x:xs,y_:ys})
            if i % 1000 == 0:
                print("After %d training step(s), loss on training batcg is %g." % (step,loss_value))
                saver.save(sess,os.path.join(MODEL_SAVE_PATH,MODEL_NAME),global_step=global_step)
def main(argv=None):
    # Download/load MNIST (one-hot labels) and run the training loop.
    mnist = input_data.read_data_sets("MNIST_data", one_hot=True)
    train(mnist)

if __name__ == '__main__':
    main()<file_sep>/code/GLF_CODE/BinaryTree.py
import numpy as np
from POICount import POICounter
import copy
def isContain(a,b,c):
    """Return True when ``a`` lies inside the closed interval [b, c]."""
    return b <= a <= c
class TreeNode():
    """Node of the region binary tree / Huffman tree."""

    def __init__(self,region):
        # Geographic region handled by this node.
        self.region = region
        # Child links (None for a leaf).
        self.left = None
        self.right = None
        # Probability of the POI path passing through this node.
        self.possibility = 1
        # Leaves store the word itself; inner nodes store an intermediate vector.
        self.value = ""
        # Huffman code of the node, used later for probability computation.
        self.Huffman = ""
        # Node frequency, used while building the Huffman tree.
        self.frequence = 0
class Reigion():
    """Rectangular geographic region.

    NOTE(review): class name is a typo of "Region", kept for compatibility.
    a, b, c, d are respectively the leftmost longitude, the rightmost
    longitude, and the two latitude bounds; poi_list maps
    poi_id -> {"lat": .., "lon": .., ...} for POIs assigned to this region.
    """
    # a, b, c, d: leftmost longitude, rightmost longitude, and the
    # upper/lower latitude lines of the rectangle.
    def __init__(self,poi_list,line):
        self.poi_list = poi_list
        [self.a,self.b,self.c,self.d] = line
    # Split the region in two: method 0 splits by longitude (vertical line),
    # method 1 splits by latitude (horizontal line).  POIs are partitioned
    # by comparison against the midpoint; returns [first_half, second_half].
    def Spilt(self,method=0):
        if method == 0:
            midlon = (self.a + self.b)/2
            left_set = {}
            right_set = {}
            for poi in self.poi_list:
                if self.poi_list[poi]["lon"] <= midlon:
                    left_set[poi] = self.poi_list[poi]
                else:
                    right_set[poi] = self.poi_list[poi]
            left_region = Reigion(left_set,[self.a,midlon,self.c,self.d])
            right_region = Reigion(right_set,[midlon,self.b,self.c,self.d])
            return [left_region,right_region]
        else:
            midlat = (self.c+self.d)/2
            above_set = {}
            below_set = {}
            for poi in self.poi_list:
                if self.poi_list[poi]["lat"] <= midlat:
                    above_set[poi] = self.poi_list[poi]
                else:
                    below_set[poi] = self.poi_list[poi]
            above_region = Reigion(above_set,[self.a,self.b,self.c,midlat])
            below_region = Reigion(below_set,[self.a,self.b,midlat,self.d])
            return [above_region,below_region]
    # Overlap test: returns the fraction of region1 (a POI's 0.1 x 0.1
    # influence rectangle) that overlaps region2 (a tree-node region).
    # Defined without `self` and called as Reigion.IsCross(r1, r2).
    # NOTE(review): only the four corner-overlap configurations are handled;
    # containment or edge-only overlaps fall through to 0.0, and the 0.1
    # influence size is hard-coded rather than shared with BinaryTree.theta
    # -- confirm both are intended.
    def IsCross(region1,region2):
        if isContain(region1.a,region2.a,region2.b) and isContain(region1.c,region2.c,region2.d):
            return (region2.b - region1.a)*(region2.d - region1.c)/(0.1*0.1)
        elif isContain(region1.b,region2.a,region2.b) and isContain(region1.c,region2.c,region2.d):
            return (region1.b - region2.a)*(region2.d - region1.c)/(0.1*0.1)
        elif isContain(region1.b,region2.a,region2.b) and isContain(region1.d,region2.c,region2.d):
            return (region1.b - region2.a)*(region1.d - region2.c)/(0.1*0.1)
        elif isContain(region1.a,region2.a,region2.b) and isContain(region1.d,region2.c,region2.d):
            return (region2.b - region1.a)*(region1.d - region2.c)/(0.1*0.1)
        else:
            return 0.0
    def Add_Poi(self,poi):
        # Merge the given {poi_id: info} mapping into this region's POI set.
        for poi_id in poi:
            self.poi_list[poi_id] = poi[poi_id]
class BinaryTree():
    """Binary region tree over all POIs, with influence-based assignment.

    The root region bounds every POI; regions are split in half (by
    longitude, then latitude) until both sides are <= 2*theta, after which
    each POI's influence area is cross-assigned to neighbouring leaf
    regions with a probability proportional to the overlap.
    """
    def __init__(self,pc,vec_len=200):
        self.vec_len = vec_len
        self.root = None
        self.pc = pc
        # Find the four bounding longitude/latitude lines of the POI set.
        # NOTE(review): `min`/`max` shadow the Python builtins here (local
        # scope only), and latitudes are compared against the 0..360 seed
        # values -- works for this dataset's positive coordinates.
        min = 0
        max = 360
        [a,b,c,d] = [max,min,max,min]
        self.theta = 0.1
        for poi in pc.poi_dict:
            if pc.poi_dict[poi]["lon"] < a:
                a = pc.poi_dict[poi]["lon"]
            if pc.poi_dict[poi]["lon"] > b:
                b = pc.poi_dict[poi]["lon"]
            if pc.poi_dict[poi]["lat"] < c:
                c =pc.poi_dict[poi]["lat"]
            if pc.poi_dict[poi]["lat"] > d:
                d =pc.poi_dict[poi]["lat"]
        self.root = TreeNode(Reigion(pc.poi_dict,[a,b,c,d]))
        self.Built_Btree(self.root)
        self.InfuenceProcess()
    def Built_Btree(self, fnode):
        # Recursively split while either side exceeds 2*theta: first by
        # longitude (width), then by latitude (height).  Regions smaller
        # than the threshold become leaves.
        if fnode.region.b-fnode.region.a > 2*self.theta:
            [fnode.left, fnode.right] = self.Spilt(0,fnode)
            self.Built_Btree(fnode.left)
            self.Built_Btree(fnode.right)
        elif fnode.region.d - fnode.region.c > 2*self.theta:
            [fnode.left, fnode.right] = self.Spilt(1,fnode)
            self.Built_Btree(fnode.left)
            self.Built_Btree(fnode.right)
    def Spilt(self,method,fnode):
        # Wrap the two half-regions produced by Reigion.Spilt into tree nodes.
        [left_region,right_region] = fnode.region.Spilt(method)
        left_node = TreeNode(left_region)
        right_node = TreeNode(right_region)
        return [left_node,right_node]
    # Propagate POI influence across neighbouring leaf regions.
    def InfuenceProcess(self):
        # Collect all leaf nodes.
        leaves = []
        stack = []
        stack.append(self.root)
        while len(stack) != 0: # pre-order traversal
            node = stack.pop()
            if node.left == None and node.right == None:
                leaves.append(node)
            else:
                if node.right != None:
                    stack.append(node.right)
                if node.left != None:
                    stack.append(node.left)
        visited_list = []
        for leaf in leaves:
            temp_list = leaf.region.poi_list
            for poi in temp_list:
                if poi not in visited_list:
                    visited_list.append(poi)
                    lat = temp_list[poi]["lat"]
                    lon = temp_list[poi]["lon"]
                    # The POI's influence square (theta x theta) centred on it.
                    influen_area = Reigion(None,[lon-self.theta/2,lon+self.theta/2,lat-self.theta/2,lat+self.theta/2])
                    possibility = 1
                    for other_leaf in leaves:
                        if other_leaf != leaf:
                            temp_possibility = Reigion.IsCross(influen_area,other_leaf.region)
                            if temp_possibility != 0:
                                # Copy the POI into the overlapping leaf with
                                # its share of the probability mass.
                                temp_info = copy.copy(temp_list[poi])
                                temp_info["possibility"] = temp_possibility
                                new_poi = {poi:temp_info}
                                other_leaf.region.Add_Poi(new_poi)
                                possibility -= temp_possibility
                    # NOTE(review): the next assignment is duplicated in the
                    # original source; the second write is a no-op.
                    temp_list[poi]["possibility"] = possibility
                    temp_list[poi]["possibility"] = possibility
        # Sanity check: each POI's probabilities over all leaves sum to 1.
        for poi in visited_list:
            nums = 0
            possibility = 0
            for leaf in leaves:
                if poi in leaf.region.poi_list.keys():
                    possibility += leaf.region.poi_list[poi]["possibility"]
            if abs(possibility - 1) > 1e-10:
                print("bad")
# Module self-test: build the region tree over the Foursquare training data.
if __name__== '__main__':
    file_object = open("C:\\Users\\Client\\PycharmProjects\\JiangHuaqi\\code\\datasets\\poidata\\Foursquare\\train.txt","rU")
    pc = POICounter(file_object)
    bt = BinaryTree(pc)
    print("hi")
<file_sep>/code/TensorFlow/mnist_inference.py
import tensorflow as tf
INPUT_NODE = 784
OUTPUT_NODE = 10
LAYER1_NODE = 500
def get_weights_variable(shape,regularizer):
    # Create (or reuse, under variable_scope) a truncated-normal weight
    # variable; if a regularizer is given, record its penalty in the
    # "losses" collection so train() can add it to the total loss.
    weights = tf.get_variable("weights",shape,initializer=tf.truncated_normal_initializer(stddev=0.1))
    if regularizer != None:
        tf.add_to_collection('losses',regularizer(weights))
    return weights
def inference(input_tensor,regularizer):
    # Two-layer fully connected net: 784 -> 500 (ReLU) -> 10 raw logits.
    # Softmax is applied later by the loss function, not here.
    with tf.variable_scope("layer1"):
        weights = get_weights_variable([INPUT_NODE,LAYER1_NODE],regularizer)
        biases = tf.get_variable("biases",[LAYER1_NODE],initializer=tf.constant_initializer(0.1))
        layer1 = tf.nn.relu(tf.matmul(input_tensor,weights)+biases)
    with tf.variable_scope("layer2"):
        weights = get_weights_variable([LAYER1_NODE,OUTPUT_NODE],regularizer)
        biases = tf.get_variable("biases",[OUTPUT_NODE],initializer=tf.constant_initializer(0.1))
        layer2 = tf.matmul(layer1,weights)+biases
    return layer2<file_sep>/venv/Lib/site-packages/tensorflow/math/__init__.py
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Basic arithmetic operators.
See the @{$python/math_ops} guide.
"""
from __future__ import print_function
from tensorflow.python import acos
from tensorflow.python import acosh
from tensorflow.python import add
from tensorflow.python import asin
from tensorflow.python import asinh
from tensorflow.python import atan
from tensorflow.python import atan2
from tensorflow.python import atanh
from tensorflow.python import bessel_i0
from tensorflow.python import bessel_i0e
from tensorflow.python import bessel_i1
from tensorflow.python import bessel_i1e
from tensorflow.python import betainc
from tensorflow.python import ceil
from tensorflow.python import cos
from tensorflow.python import cosh
from tensorflow.python import digamma
from tensorflow.python import equal
from tensorflow.python import erfc
from tensorflow.python import exp
from tensorflow.python import expm1
from tensorflow.python import floor
from tensorflow.python import greater
from tensorflow.python import greater_equal
from tensorflow.python import igamma
from tensorflow.python import igammac
from tensorflow.python import invert_permutation
from tensorflow.python import less
from tensorflow.python import less_equal
from tensorflow.python import lgamma
from tensorflow.python import log
from tensorflow.python import log1p
from tensorflow.python import logical_and
from tensorflow.python import logical_not
from tensorflow.python import logical_or
from tensorflow.python import maximum
from tensorflow.python import minimum
from tensorflow.python import not_equal
from tensorflow.python import polygamma
from tensorflow.python import polyval
from tensorflow.python import reciprocal
from tensorflow.python import rint
from tensorflow.python import rsqrt
from tensorflow.python import segment_max
from tensorflow.python import segment_mean
from tensorflow.python import segment_min
from tensorflow.python import segment_prod
from tensorflow.python import segment_sum
from tensorflow.python import sin
from tensorflow.python import sinh
from tensorflow.python import squared_difference
from tensorflow.python import tan
from tensorflow.python import unsorted_segment_max
from tensorflow.python import unsorted_segment_min
from tensorflow.python import unsorted_segment_prod
from tensorflow.python import unsorted_segment_sum
from tensorflow.python import zeta
from tensorflow.python.ops.gen_nn_ops import softplus
from tensorflow.python.ops.gen_nn_ops import softsign
del print_function
<file_sep>/paper/GLF_PAPER/Learning From GLR.md
# Geographical Latent Representation for Predicting Future Visitors #
## main work ##
- They use **latent model** incorporating the geographical influence of POIs.In fact,they use word2vec to realize it. Their work is getting the probability of a user $u$ visiting a POI $l$ considering of the previous POI $C(l)$ the user $u$ visited.It is defined with a SoftMax function:$$Pr(l|C(l)) = e^{(w(l)\cdot\Phi(C(l)) )}/Z(C(l))$$
Here,$w(l)$ is the latent vector for POI $l$ and $\Phi(C(l)) = \sum_{l_{c}\in C(l)}w(l_{c})$ means the sum of vector of contextual POIS.
In order to reduce the computation of $Z(C(l))$, it use hierarchical SoftMax function.
- They use a binary tree to realize hierarchical SoftMax and use Huffman tree to construct the binary tree.Because Huffman tree can get shortest average path if we construct it based on the frequency of the distribution of POI.In order to **incorporating Geographical Influence**, they use several steps to construct binary trees.
First, they split the POIs into a hierarchical of binary regions.
Second, they assign a POI to multiple regions by considering the influence of a POI.
Finally, they construct a Huffman tree on each region.
Then, we can get the new hierarchical SoftMax function $Pr(l|C(l)) = \prod_{path_{k} \in{P(l)}}Pr(path_{k})\times Pr(l|C(l))^{path_{k}}$ to replace the normal SoftMax function.
Finally, we maximize the posterior probability. So we can get: $\Theta = argmax_{(W(\mathcal{L}),\Psi(B))}\prod_{(l,C(l))\in \mathcal{H}}Pr(l|C(l))$. Here, $W(\mathcal{L})$ represents the latent representations of all the POIs and $\Psi(B)$ represents the latent representations of inner nodes.
-
---
## Question ##
1. What is POI? How we realize it in the code?
POI is Point of Interest. It means a place.
2. What is SoftMax?
<file_sep>/venv/Lib/site-packages/tensorflow/strings/__init__.py
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Operations for working with string Tensors.
See the @{$python/string_ops} guide.
"""
from __future__ import print_function
from tensorflow.python import regex_full_match
from tensorflow.python import regex_replace
from tensorflow.python import string_join as join
from tensorflow.python import string_split_v2 as split
from tensorflow.python import string_strip as strip
from tensorflow.python import string_to_hash_bucket as to_hash_bucket
from tensorflow.python import string_to_hash_bucket_fast as to_hash_bucket_fast
from tensorflow.python import string_to_hash_bucket_strong as to_hash_bucket_strong
from tensorflow.python import string_to_number as to_number
from tensorflow.python import substr
del print_function
<file_sep>/README.md
# JiangHuaqi #
公式乱码,请使用谷歌浏览器并且安装https://chrome.google.com/webstore/detail/github-with-mathjax/ioemnmodlmafdkllaclgeombjnmnbima/related 插件
<file_sep>/code/GLF_CODE/datasets/poidata/readme.txt
Each line of the files follows the following format:
user_ID POI_ID coordinate checkin_time(hour:min) date_id
p.s., check-ins made on the same date have the same date_id
*****************************************************************************************************
This data is used in the experiments of the following paper:
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>: Time-aware point-of-interest recommendation. SIGIR 2013: 363-372
Please kindly cite the paper if you choose to use the data.<file_sep>/venv/Lib/site-packages/tensorflow/io/__init__.py
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.io namespace.
"""
from __future__ import print_function
from tensorflow.python import decode_base64
from tensorflow.python import decode_compressed
from tensorflow.python import decode_json_example
from tensorflow.python import decode_raw
from tensorflow.python import encode_base64
from tensorflow.python import matching_files
from tensorflow.python import parse_tensor
from tensorflow.python import read_file
from tensorflow.python import write_file
del print_function
<file_sep>/code/NeuralMemory_Adversarial_CODE/model_R.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.nn.parameter import Parameter
class KVMRN(nn.Module):
    """Key-Value Memory Recommender Network.

    A fixed-size external memory (key_matrix / value_matrix) is addressed
    by user embeddings.  forward() reads an "abstract item" vector from the
    memory, writes the current positive items back into it (erase/add), and
    returns a rank-weighted hinge ranking loss against sampled negatives.
    NOTE(review): near-duplicate of Dis in model_GAN.py; here the loss is
    summed over the batch rather than averaged.
    """
    def __init__(self, dim, n_users, n_items, memory_size = 64):
        super(KVMRN, self).__init__()
        self.memory_size = memory_size
        self.dim = dim
        self.n_users = n_users
        self.n_items = n_items
        # External memory: keys address slots, values hold slot contents.
        self.key_matrix = Parameter(torch.Tensor(memory_size, dim))
        self.value_matrix = Parameter(torch.Tensor(memory_size, dim))
        self.user_embeddings = nn.Embedding(n_users, dim, max_norm = 1)
        self.item_embeddings = nn.Embedding(n_items, dim, max_norm = 1)
        # Projections producing the erase / add signals for the write step.
        self.e_linear = nn.Linear(dim, dim)
        self.a_linear = nn.Linear(dim, dim)
        self.key_matrix.data.normal_(0, 1)
        self.value_matrix.data.normal_(0, 1)
    def forward(self, users, items, neg_items, use_rank_weight, margin):
        """Read/write the memory and return the hinge ranking loss.

        NOTE: mutates self.value_matrix.data (the memory write) as a side
        effect of every forward pass.
        """
        # [batch_size]
        # embed = [batch_size, dim}
        users_embed = self.user_embeddings(users)
        items_embed = self.item_embeddings(items)
        neg_items_embed = self.item_embeddings(neg_items)
        # attentionW -- addressing: softmax over negative squared distances
        # between the user embedding and each memory key.
        ex_users_embed = torch.unsqueeze(users_embed, 1)
        attention = torch.sum((ex_users_embed - self.key_matrix) ** 2, dim = 2)
        correlation_weight = F.softmax(-attention, dim = 1)
        # read: attention-weighted sum over memory values.
        # [1, memory size, dim]
        ex_value_matrx = torch.unsqueeze(self.value_matrix, dim = 0)
        # [batch size, memory size, 1]
        ex_correlation_weight = torch.unsqueeze(correlation_weight, dim = 2)
        abst_items_embed = torch.sum(ex_value_matrx * ex_correlation_weight, dim = 1)
        # write: erase-then-add update driven by the positive item embedding.
        # [batch size, dim]
        erase_vector = self.e_linear(items_embed)
        erase_signal = F.sigmoid(erase_vector)
        add_vector = self.a_linear(items_embed)
        add_signal = F.tanh(add_vector)
        # [batch size, 1, dim]
        ex_erase_signal = torch.unsqueeze(erase_signal, 1)
        # w_t(i) * e_t
        # # [batch size, 1, dim]
        # [batch size, memory size, 1]
        erase_mul = torch.mean(ex_erase_signal * ex_correlation_weight, dim = 0)
        erase = self.value_matrix * (1 - erase_mul)
        add_reshaped = add_signal.view(-1, 1, self.dim)
        add_mul = torch.mean(add_reshaped * ex_correlation_weight, dim = 0)
        self.value_matrix.data = (erase + add_mul).data
        # pos_distances = torch.sum((users_embed - items_embed) ** 2, dim = 1)
        # distance_to_neg_items = torch.sum((torch.unsqueeze(users_embed, 1) - neg_items_embed) ** 2, dim = 2)
        # closest_negative_item_distances = torch.min(distance_to_neg_items, dim = 1)[0]
        # Hinge loss: abstract item must be closer to the positive than to
        # the closest sampled negative by at least `margin`.
        pos_abst_distances = torch.sum((abst_items_embed - items_embed) ** 2, dim = 1)
        abst_distance_to_neg_items = torch.sum((torch.unsqueeze(abst_items_embed, 1) - neg_items_embed) ** 2, dim = 2)
        closest_abst_negative_item_distances = torch.min(abst_distance_to_neg_items, dim = 1)[0]
        loss_per_pair = torch.clamp(pos_abst_distances - closest_abst_negative_item_distances + margin, min = 0)
        # loss_per_pair = torch.clamp(
        #     pos_abst_distances - closest_abst_negative_item_distances + pos_distances - closest_negative_item_distances + margin,
        #     min = 0)
        if use_rank_weight:
            # indicator matrix for impostors (N x W)
            impostors = (torch.unsqueeze(pos_abst_distances, -1) - abst_distance_to_neg_items + margin) > 0
            # impostors = (torch.unsqueeze(pos_abst_distances, -1) - abst_distance_to_neg_items + torch.unsqueeze(pos_distances, -1) - distance_to_neg_items + margin) > 0
            rank = torch.mean(impostors.float(), dim = 1) * self.n_users
            loss_per_pair *= torch.log(rank + 1)
        loss = torch.sum(loss_per_pair)
        return loss
    def abs_embed(self, users):
        """Return the memory read (the "abstract" representation) for users."""
        # [N, D]
        users_embed = self.user_embeddings(users)
        # attentionW
        #
        ex_users_embed = torch.unsqueeze(users_embed, 1)
        attention = torch.sum((ex_users_embed - self.key_matrix) ** 2, dim = 2)
        correlation_weight = F.softmax(-attention, dim = 1)
        # read
        # [1, memory size, dim]
        ex_value_matrx = torch.unsqueeze(self.value_matrix, dim = 0)
        # [batch size, memory size, 1]
        ex_correlation_weight = torch.unsqueeze(correlation_weight, dim = 2)
        read_content = torch.sum(ex_value_matrx * ex_correlation_weight, dim = 1)
        return read_content
<file_sep>/paper/NMSRN_PAPER/think of paper.md
# Neural Memory Streaming Recommender Networks with Adversarial Training #
## 模型介绍 ##

A、B分别是用户和item的embedding矩阵<file_sep>/code/NeuralMemory_Adversarial_CODE/sampler.py
import numpy as np
from dataloader import movielens
import pdb
from tqdm import tqdm
class WARPSampler():
    """Yields (user, positive item) pairs with random negative candidates.

    Negative item ids are drawn uniformly from [1, n_items); when
    ``check_negative`` is set, any candidate the user has actually
    interacted with is re-sampled until it is a true negative.
    """

    def __init__(self, train_pd, most_popular_items, n_items, batch_size = 64, n_candiidates = 10,
                 check_negative = True):
        """
        :param train_pd: DataFrame of positive user-item interactions
            (must have 'user' and 'item' columns)
        :param most_popular_items: precomputed popular-item list (accepted
            for interface compatibility; unused here)
        :param n_items: number of items; valid ids are 1 .. n_items - 1
        :param batch_size: number of positive pairs per yielded batch
        :param n_candiidates: negative candidates per positive pair
        :param check_negative: re-sample candidates that are actually positives
        """
        self.train_pd = train_pd
        self.batch_size = batch_size
        self.n_items = n_items
        self.n_candiidates = n_candiidates
        self.check_negative = check_negative
        self.items_set = set(list(range(1, n_items)))
        # user id -> set of items the user interacted with (the positives).
        self.user_to_items = dict()
        for t in self.train_pd.itertuples():
            self.user_to_items.setdefault(t.user, set())
            self.user_to_items[t.user].add(t.item)

    def next_batch(self):
        """Generator over (user_positive_items_pairs, negative_samples).

        Yields floor(len(train_pd) / batch_size) batches; negative_samples
        has shape (batch_size, n_candiidates).
        """
        for i in range(int(self.train_pd.shape[0] / self.batch_size)):
            user_positive_items_pairs = self.train_pd.iloc[i * self.batch_size: (i + 1) * self.batch_size][
                ['user', 'item']].values
            # Candidates drawn uniformly from the valid id range [1, n_items).
            negative_samples = np.random.randint(1, self.n_items, size = (self.batch_size, self.n_candiidates))
            if self.check_negative:
                for (j, k), neg in np.ndenumerate(negative_samples):
                    while neg in self.user_to_items[user_positive_items_pairs[j, 0]]:
                        # BUGFIX: re-sample from the same [1, n_items) range;
                        # the original drew from [0, n_items), which could
                        # emit the invalid item id 0.
                        negative_samples[j, k] = neg = np.random.randint(1, self.n_items)
            yield user_positive_items_pairs, negative_samples
# Manual smoke test against the MovieLens 20M dump.
# NOTE(review): dataloader.movielens returns an 8-tuple
# (train, four test splits, popular items, n_users, n_items) -- unpacking
# it into 4 names here would raise ValueError; verify before running.
if __name__ == '__main__':
    train_pd, test_pd, most_popular_items, n_items = movielens('datasets/ml/ml20m.csv')
    sampler = WARPSampler(train_pd, most_popular_items, n_items, batch_size = 10, n_candiidates = 200,
                          check_negative = True)
    for user_pos, neg_cands in sampler.next_batch():
        print(neg_cands)
    # for user_pos, neg in sampler.next_batch():
    #     pos_user = user_pos[:, 0]
|
8a23784d1988b61bf5a62cc95ed36832dd5620c8
|
[
"Markdown",
"Python",
"Text"
] | 22 |
Python
|
dbgroup-uestc/JiangHuaqi
|
cac9cb3d75a502dce883e8b156de5f33bebcc2ab
|
df26125daa13a8301bd9829b6905608567ff76e8
|
refs/heads/master
|
<repo_name>vhs/isvhsopen<file_sep>/test/testState.js
'use strict'
const stateController = require('../controller/state.js')
const sinon = require('sinon')
const should = require('chai').should()
describe('isvhsopen state test', function () {
this.timeout(5000)
let state = null
before(function () {
this.clock = sinon.useFakeTimers()
return stateController.resetState().then(function (s) {
state = s
})
})
after(function () {
this.clock.restore()
})
it('should trigger an event when opened', function (done) {
state.once('change', function (data) {
should.exist(data)
data.should.have.property('duration', 2)
data.should.have.property('newStatus', 'open')
done()
})
this.clock.tick(2000)
state.setOpen('20:00')
})
it('should trigger an event when closed', function (done) {
state.once('change', function (data) {
should.exist(data)
data.should.have.property('duration', 1)
data.should.have.property('newStatus', 'closed')
done()
})
this.clock.tick(1000)
state.setClosed()
})
})
<file_sep>/Makefile
test:
gulp test
.PHONY: test
<file_sep>/Dockerfile
FROM node:lts
WORKDIR /usr/src/app
RUN apt-get -y update && apt -y upgrade
COPY package.json /usr/src/app/
RUN cd /usr/src/app && \
echo "Installing dependencies, this may take a while" && \
npm install
COPY . /usr/src/app
RUN node_modules/.bin/gulp build
ENV TZ=America/Vancouver
CMD [ "npm", "start" ]
EXPOSE 3000
<file_sep>/gulpfile.js
'use strict'
const gulp = require('gulp')
const mocha = require('gulp-mocha')
const watchify = require('watchify')
const browserify = require('browserify')
const source = require('vinyl-source-stream')
const buffer = require('vinyl-buffer')
const logger = require('fancy-log')
const sourcemaps = require('gulp-sourcemaps')
const sass = require('gulp-sass')(require('node-sass'))
gulp.task('test', function () {
return gulp.src('test/*.js', { read: false })
.pipe(mocha({ reporter: 'nyan' }))
})
const opts = Object.assign({}, watchify.args, {
entries: './app/main.jsx',
extensions: ['.jsx'],
debug: true
})
const b = browserify(opts)
b.on('update', bundle(false)) // on any dep update, runs the bundler
b.on('log', logger) // output build logs to terminal
b.transform('babelify', { presets: ['@babel/preset-env', '@babel/preset-react'] })
function bundle (watch) {
return function () {
let target
if (watch) {
target = watchify(b)
} else {
target = b
}
return target.bundle()
// log errors if they happen
.on('error', (err) => logger.error('Browserify Error:', err))
.pipe(source('bundle.js'))
// optional, remove if you don't need to buffer file contents
.pipe(buffer())
// optional, remove if you dont want sourcemaps
.pipe(sourcemaps.init({ loadMaps: true })) // loads map from browserify file
// Add transformation tasks to the pipeline here.
.pipe(sourcemaps.write('./')) // writes .map file
.pipe(gulp.dest('./dist'))
}
}
gulp.task('copy-fonts', function () {
return gulp.src('./node_modules/bootstrap-sass/assets/fonts/**/*')
.pipe(gulp.dest('dist/fonts'))
})
gulp.task('sass', function () {
return gulp.src('./public/stylesheets/**.sass')
.pipe(sourcemaps.init())
.pipe(sass().on('error', sass.logError))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest('./dist/css'))
})
gulp.task('sass-watch', function () {
gulp.watch('./public/stylesheets/**.sass', ['sass'])
})
gulp.task('js', bundle(false))
gulp.task('js-watch', bundle(true))
gulp.task('watch', gulp.series('copy-fonts', 'sass', 'sass-watch', 'js-watch'))
gulp.task('build', gulp.series('copy-fonts', 'sass', 'js'))
<file_sep>/test/testSlack.js
'use strict'
const slack = require('../controller/slack')
const config = require('../controller/config')
const stateController = require('../controller/state.js')
const nock = require('nock')
require('chai').should()
describe('isvhsopen slack test', function () {
let state
before(function () {
config.set('slackHookUrl', 'http://mockslack/mock/webhook')
return stateController.resetState().then(function (s) {
state = s
return slack.setup()
})
})
const mockSlack = function (cb) {
nock('http://mockslack')
.post('/mock/webhook')
.reply(200, function (uri, requestBody) {
let payload = requestBody
if (typeof payload !== 'object') { payload = JSON.parse(requestBody) }
cb(payload)
})
}
it('should alert slack when the space is open', function (done) {
this.timeout(1000)
mockSlack(function (payload) {
payload.should.have.property('text', 'VHS is now <http://isvhsopen.com|open> until 20:00')
done()
})
state.setOpen('20:00')
})
it('should alert slack when the space is closed', function (done) {
this.timeout(1000)
mockSlack(function (payload) {
payload.should.have.property('text', 'VHS is now <http://isvhsopen.com|closed>')
done()
})
state.setClosed()
})
it('should alert slack when the space is open, omitting time if not specified', function (done) {
this.timeout(1000)
mockSlack(function (payload) {
payload.should.have.property('text', 'VHS is now <http://isvhsopen.com|open>')
done()
})
state.setOpen()
})
})
<file_sep>/controller/stats.js
'use strict'
const Influx = require('influx')
const stateController = require('./state')
const debug = require('debug')('isvhsopen:controller:stats')
const config = require('./config')
let _instance
function writeEvent (event) {
debug('writeEvent:', event)
const points = [{
measurement: 'api',
tags: {
name: 'door',
space: 'vhs'
},
fields: {
duration: event.duration,
value: event.newStatus
}
}]
debug('writing points:', points)
return _instance.writePoints(points)
}
function getLastStatus () {
const resultSet = {
status: 'unknown',
last: new Date()
}
debug('getLastStatus', 'Getting last status')
const query = "select * from api where space='vhs' and \"name\"='door' order by time desc limit 1;"
debug('getLastStatus', 'Doing query')
return _instance.query(query)
.then(function (results) {
debug('Getting last status query result:')
debug(results)
if (results[0] && results[0].value !== undefined) {
debug('getLastStatus', 'Got result')
const item = results[0]
resultSet.status = item.value
resultSet.last = new Date(item.time)
return resultSet
}
debug('getLastStatus', 'Got unknown result')
debug('getLastStatus', 'results:', JSON.stringify(results))
return resultSet
})
.catch(function (err) {
debug('getLastStatus', 'Got error:', err)
return resultSet
})
}
module.exports.setup = function () {
const options = {
host: config.get('influxHost'),
port: config.get('influxPort'),
username: config.get('influxUser'),
password: config.get('influxPw'),
protocol: 'http',
database: config.get('influxDb')
}
debug('setup', options)
_instance = new Influx.InfluxDB(options)
let state
return stateController.currentState().then(function (s) {
debug('Getting currentState')
state = s
debug('Registering change listener')
state.on('change', function (event) {
writeEvent(event)
.then(function (data) {
debug('Wrote changes to influx')
debug('event:', event)
debug('data:', data)
return data
})
.catch(function (err) {
debug(err)
})
})
debug('Returning getLastStatus')
return getLastStatus()
})
.then(function (last) {
state.init(last)
})
.catch(function (err) {
debug(err)
})
}
module.exports.getLastStatus = getLastStatus
<file_sep>/test/testStats.js
'use strict'
const debug = require('debug')('isvhsopen:test:stats')
const stats = require('../controller/stats')
const config = require('../controller/config')
const stateController = require('../controller/state.js')
const nock = require('nock')
const sinon = require('sinon')
require('chai').should()
describe('isvhsopen stats test', function () {
let state
let queryMock
before(function () {
debug('Calling before')
this.clock = sinon.useFakeTimers()
debug('Setting influxHost to mockinflux')
config.set('influxHost', 'mockinflux')
debug('Setting persistent query nock')
queryMock = mockInfluxQuery()
debug('persist:', queryMock.pendingMocks())
debug('Getting stateController')
return stateController.resetState().then(function (s) {
debug('persist:', queryMock.pendingMocks())
state = s
debug('Getting initialized stats controller')
return stats.setup()
})
})
after(function () {
this.clock.restore()
})
const mockInfluxQuery = function () {
return nock('http://mockinflux:8086')
.persist()
.get('/query')
.query(true)
.reply(200, {
results: [{
statement_id: 0,
series: []
}]
})
}
const mockInfluxWrite = function (cb) {
nock('http://mockinflux:8086')
.post('/write')
.query(true)
.reply(200, function (uri, requestBody) {
cb(requestBody)
})
}
it('should write stats when vhs is open', function (done) {
this.timeout(5000)
debug('Calling mockInfluxWrite')
mockInfluxWrite(function (payload) {
payload.should.equal('api,name=door,space=vhs duration=2,value="open"')
done()
})
debug('Making clock tick')
this.clock.tick(2000)
debug('should write stats when vhs is open')
state.setOpen()
debug('opened')
})
it('should write stats when the space is closed', function (done) {
this.timeout(5000)
mockInfluxWrite(function (payload) {
payload.should.equal('api,name=door,space=vhs duration=1,value="closed"')
done()
})
this.clock.tick(1000)
state.setClosed()
})
})
<file_sep>/README.md
# IsVHSOpen.com
Is VHS open?
This application pretty much serves one purpose, tell you if VHS is open. It's also overly complicated for what it does.
Why? Why not!?
## Installing
```bash
$ npm install -g gulp #May require sudo
$ npm install
$ gulp build
```
This will install glup globally and then install all package requirements locally. 'gulp build' will then
generate relevant css and javascript.
## Running
```bash
export INFLUX_HOST=<your influx host>
export DEBUG=isvhsopen:*
npm start
```
This will launch a default server but you'll need to give it access to a influx server to be useful. A full list of
config options are found in controller/config.js
If you want a quick easy influx setup I recommend starting one via docker
```bash
docker run -d -p 8083:8083 -p 8086:8086 tutum/influxdb
```
This will start a clean influxdb container, from there you just have to create the database using the web UI:
```sql
CREATE DATABASE api;
```
## Development
If you are developing then you will want the javascript/css generated while you make changes.
```bash
gulp watch
```
When you make a change it will auto build anything that is needed.
## Testing
```bash
gulp test
```
## Running from docker
```bash
docker run -d --env-file=<env file> -p <port>:3000 vanhack/isvhsopen
```
This will pull down the app from docker hub and start it.
The env file is used to hold a list of application variables, see controller/config.js for a list of options that
can be set. The contents of the file should be in the format of VARNAME=value
If you don't want the version from docker you can build it yourself:
```bash
docker build -t vanhack/isvhsopen ./
```<file_sep>/controller/config.js
'use strict'
const convict = require('convict')
// define a schema
const conf = convict({
env: {
doc: 'The applicaton environment.',
format: ['production', 'development', 'test'],
default: 'development',
env: 'NODE_ENV'
},
influxHost: {
doc: 'Influx Host',
format: String,
default: 'localhost',
env: 'INFLUX_HOST'
},
influxPort: {
doc: 'Influx Port',
format: 'port',
default: 8086,
env: 'INFLUX_PORT'
},
influxUser: {
doc: 'Influx User',
format: String,
default: '',
env: 'INFLUX_USER'
},
influxPw: {
doc: 'Influx Password',
format: String,
default: '',
env: 'INFLUX_PASSWORD'
},
influxDb: {
doc: 'Influx Database',
format: String,
default: 'api',
env: 'INFLUX_DB'
},
slackHookUrl: {
doc: 'Slack WebHook Url ',
format: String,
default: '',
env: 'SLACK_WEB_HOOK_URL'
}
})
conf.validate()
module.exports = conf
|
d32ea3654fd24f27690b3e509cdcdca855ddf520
|
[
"JavaScript",
"Makefile",
"Dockerfile",
"Markdown"
] | 9 |
JavaScript
|
vhs/isvhsopen
|
012436439ef6f190589a0620b25818bcf15d74ee
|
f82eecf046cf58b7cf0cac2478883c488f79ed19
|
refs/heads/master
|
<file_sep>import fnmatch
from functools import partial
import os
import re
import sys
__author__ = 's.taran'
def files(path):
for path, subdirs, files in os.walk(path):
for name in files:
file_path = os.path.join(path, name)
if os.path.isfile(file_path):
yield (name, os.path.join(path, name))
def find_entries(file_path, search):
with open(file_path, 'r') as f:
for num, line in enumerate(f):
if search in line:
yield (num, line)
def grep(path, pattern, search):
for name, file_path in files(path):
if fnmatch.fnmatch(name, pattern):
for num, line in find_entries(file_path, search):
print('%s in line %i:\n %s' % (file_path, num, line), end='')
if len(sys.argv) > 2:
grep(os.getcwd(), sys.argv[1], sys.argv[2])
else:
sys.stderr.write('2 parameters expected.\n')
|
0ee5ccf50524643c1b87f32a1ca76f1fc8778bc3
|
[
"Python"
] | 1 |
Python
|
s-tar/pygrep
|
9984675609e79097d0f3417c2a5688909acab780
|
e762cad7f3063492f681d1ea60fb7d5d9391c873
|
refs/heads/master
|
<repo_name>fwu96/Basic-THREE<file_sep>/8-Snowman.js
/*jshint esversion: 6 */
// @ts-check
/**
* Code for page 8
*/
import {onWindowOnload} from "./Libs/helpers.js";
// these four lines fake out TypeScript into thinking that THREE
// has the same type as the T.js module, so things work for type checking
// type inferencing figures out that THREE has the same type as T
// and then I have to use T (not THREE) to avoid the "UMD Module" warning
/** @type typeof import("./THREE/threets/index"); */
let T;
// @ts-ignore
T = THREE;
function snowman() {
let renderer = new T.WebGLRenderer();
renderer.setSize(500,500);
document.getElementById("snowman").appendChild(renderer.domElement);
let scene = new T.Scene();
let camera = new T.PerspectiveCamera();
camera.position.z = 10;
camera.position.y = 5;
camera.position.x = 0;
camera.lookAt(0, 3, 0);
scene.add(new T.AmbientLight("white"));
let ground = new T.BoxGeometry(5, 0.1, 5);
let groundM = new T.Mesh(ground, new T.MeshLambertMaterial({color: 0x888888}));
groundM.position.y = -0.05;
scene.add(groundM);
let point = new T.PointLight("white", 1, 0, 0);
point.position.set(0, 50, 15);
scene.add(point);
// The body parts of the snowman
let sphere = new T.SphereGeometry(1.5, 30, 30);
let sphere_material = new T.MeshLambertMaterial({color: "#D0D3D4"});
let lower_body = new T.Mesh(sphere, sphere_material);
lower_body.position.set(0, 1.5, 0);
scene.add(lower_body);
let upper_body = new T.Mesh(sphere, sphere_material);
upper_body.scale.x = 0.8;
upper_body.scale.y = 0.8;
upper_body.scale.z = 0.8;
upper_body.position.set(0, 3.5, 0);
scene.add(upper_body);
let head = new T.Mesh(sphere, sphere_material);
head.scale.set(0.65, 0.65, 0.65);
head.position.set(0, 5.2, 0);
scene.add(head);
// The face parts of the snowman
let nose = new T.Mesh(new T.ConeGeometry(0.1, 0.5), new T.MeshLambertMaterial({color: "#E67E22"}));
nose.position.set(0, 6.4, 0);
nose.rotateX(Math.PI / 2);
nose.position.z = 1.3;
nose.position.y = 5.3;
scene.add(nose);
let eye_sphere = new T.SphereGeometry(0.1, 60, 60);
let right_eye = new T.Mesh(eye_sphere, new T.MeshLambertMaterial({color: "#17202A"}));
right_eye.position.set(0.4, 5.5, 1);
scene.add(right_eye);
let left_eye = new T.Mesh(eye_sphere, new T.MeshLambertMaterial({color: "#17202A"}));
left_eye.position.set(-0.4, 5.5, 1);
scene.add(left_eye);
// The hat of the snowman
let cylinder = new T.CylinderGeometry(0.4, 0.6, 0.6, 32);
let hat_body = new T.Mesh(cylinder, new T.MeshLambertMaterial({color: "#D35400"}));
hat_body.position.set(0, 6.2, 0);
scene.add(hat_body);
let ring = new T.RingGeometry(0.3, 0.34, 60);
let ring_material = new T.MeshPhongMaterial({color: "#839192"});
ring_material.specular.set("#839192");
let hat_ring = new T.Mesh(ring, ring_material);
hat_ring.position.set(-0.3, 6.5, 0);
hat_ring.rotateX(Math.PI / 4);
hat_ring.rotateY(-Math.PI / 4);
scene.add(hat_ring);
// Add another light
let sunlight = new T.PointLight("#F9D71C");
sunlight.position.set(-10, 10, 0);
scene.add(sunlight);
renderer.render(scene, camera);
}
onWindowOnload(snowman);
<file_sep>/README.md
# README file for Workbook (Assignment) 6
It is the student's responsibility to fill this in.
See <https://graphics.cs.wisc.edu/WP/cs559-sp2019/workbooks/#README_files>
## please answer these first three required questions "inline" (as in the instructions)
Name: <NAME>
WiscID: fwu62
GitHub Login: fwu96
## please answer these next (optional) questions on a line following the questions
Attributions:
Parts of the Assignment you did (or did not) do:
Did you do any bonus parts?
Notes to the Grader:
- Snowman:
- I place the camera to make the snowman be looked from the front
- I make the AmbientLight as white
- I make a white pointLight which shines on the top
- I use Sphere to make the bodies and head, and also two eyes
- I use Cone to make the nose
- I use Cylinser and Ring to make a hat of the snowman
- I add a pointLinght which is "light yellow" (a fake sunlight) comes from left
- Museum
- I place a astronaut in it
- I place a simple-version snowman in it, the rotation can be seen by its very tiny nose
- The snowman probably looks little wired because of the requirement, I used some "weird" materials and status on it, but the whole shape is still a snowman
- I place a cone in it
- The snowman and the cone have different materials
- four lights are lighting on each object
- when click on camera, different camera will focus on one specific object
<file_sep>/6-Materials.js
/*jshint esversion: 6 */
// @ts-check
/**
* Code for page 8
*/
import {onWindowOnload} from "./Libs/helpers.js";
// these four lines fake out TypeScript into thinking that THREE
// has the same type as the T.js module, so things work for type checking
// type inferencing figures out that THREE has the same type as T
// and then I have to use T (not THREE) to avoid the "UMD Module" warning
/** @type typeof import("./THREE/threets/index"); */
let T;
// @ts-ignore
T = THREE;
function materials() {
let renderer = new T.WebGLRenderer();
renderer.setSize(500,500);
document.getElementById("materials").appendChild(renderer.domElement);
let scene = new T.Scene();
// make an array of materials
// student should improve these materials
let materials = [];
// Give each material some parameters to create different appearances
materials[0] = new T.MeshStandardMaterial();
materials[1] = new T.MeshStandardMaterial();
materials[2] = new T.MeshStandardMaterial();
materials[3] = new T.MeshStandardMaterial();
materials[4] = new T.MeshStandardMaterial();
materials[5] = new T.MeshStandardMaterial();
materials[6] = new T.MeshStandardMaterial();
materials[7] = new T.MeshStandardMaterial();
materials[8] = new T.MeshStandardMaterial();
// set for sphere 1
materials[0].color.set("#FDD835");
materials[0].roughness = 0.0;
// set for sphere 2
materials[1].emissive.set("#8BC34A");
materials[1].metalness = 0.9;
materials[1].roughness = 0.5;
// set for sphere 3
materials[2].color.set("#4FC3F7");
materials[2].metalness = 0.3;
// set for sphere 4
materials[3].color.set("#5E35B1");
materials[3].emissive.set("#5E35B1");
materials[3].emissiveIntensity = 0.3;
// set for sphere 5
materials[4].color.set("#EC407A");
materials[4].roughness = 0.3;
materials[4].metalness = 0.3;
// set for sphere 6
materials[5].color.set("#00796B");
materials[5].roughness = 0.9;
// set for sphere 7
materials[6].color.set("#BF360C");
materials[6].metalness = 0.9;
// set for sphere 8
materials[7].emissive.set("#7E57C2");
materials[7].emissiveIntensity = 0.6;
materials[7].metalness = 0.1;
// set for sphere 9
materials[8].color.set("#607D8B");
materials[8].emissive.set("#607D8B");
materials[8].emissiveIntensity = 0.2;
materials[8].roughness = 0.7;
materials[8].metalness = 0.8;
// make spheres to show off the materials
let geometry = new T.SphereBufferGeometry(1,20,20);
// array of meshes
let spheres = [];
for(let i=0; i<9; i++) {
spheres[i] = new T.Mesh(geometry, materials[i]);
spheres[i].position.set( ((i%3)-1)*3, 0, Math.floor(i/3)*3);
scene.add(spheres[i]);
}
// make some lights
let l1 = new T.DirectionalLight();
let l2 = new T.PointLight();
l2.position.set(10,10,10);
scene.add(l1);
scene.add(l2);
// a camera
let camera = new T.PerspectiveCamera();
camera.position.set(0,10,10);
camera.lookAt(0,-2,0);
renderer.render(scene,camera);
}
onWindowOnload(materials);
|
4c0e09eab85abc9d6aa34b4a038dd0ffcd9990a7
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
fwu96/Basic-THREE
|
9c77d92a3c7fb928b56da7a0a8751a7e00d4dd89
|
a68cbd35da9140d105a4f5d9460f810bfeb18cd0
|
refs/heads/master
|
<repo_name>aloeb/fitness_buddy<file_sep>/models/user.js
var mongoose = require('mongoose');
var SALT_WORK_FACTOR = 10;
var UserSchema = new mongoose.Schema({
fb_id: String,
name: String,
calendar: { type: mongoose.Schema.Types.ObjectId, ref: 'Calendar' },
workouts: [ { type: mongoose.Schema.Types.ObjectId, ref: 'Workout'} ]
});
UserSchema.pre('save', function(next) {
next()
// Check authentication or something? call next when ready
});
UserSchema.methods.comparePassword = function(candidatePassword, cb) {
bcrypt.compare(candidatePassword, this.password, function(err, isMatch) {
if (err) return cb(err);
cb(null, isMatch);
});
};
module.exports = mongoose.model('User', UserSchema);
<file_sep>/models/workout.js
var mongoose = require('mongoose');
var WorkoutSchema = new mongoose.Schema({
completed_on: { type: Date },
routine: { type: mongoose.Schema.Types.ObjectId, ref: 'Routine' }
});
module.exports = mongoose.model('Workout', WorkoutSchema);<file_sep>/html/js/my-app.js
// Initialize your app
var myApp = new Framework7();
// Export selectors engine
var $$ = Dom7;
// Add view
var view1 = myApp.addView('#view-1', {
// Because we use fixed-through navbar we can enable dynamic navbar
dynamicNavbar: true
});
var view2 = myApp.addView('#view-2', {
// Because we use fixed-through navbar we can enable dynamic navbar
dynamicNavbar: true
});
var view6 = myApp.addView('#view-6', {
// Because we use fixed-through navbar we can enable dynamic navbar
dynamicNavbar: true
});
var view4 = myApp.addView('#view-4', {
// Because we use fixed-through navbar we can enable dynamic navbar
dynamicNavbar: true
});
// var view9 = myApp.addView('#view-9', {
// // Because we use fixed-through navbar we can enable dynamic navbar
// dynamicNavbar: true
// });
myApp.onPageInit('login', function(page){
//your code for init charts should be there
console.log("I'm here");
// var randomScalingFactor = function(){ return Math.round(Math.random()*100)};
// var lineChartData = {
// labels : ["January","February","March","April","May","June","July"],
// datasets : [
// {
// label: "My First dataset",
// fillColor : "rgba(220,220,220,0.2)",
// strokeColor : "rgba(220,220,220,1)",
// pointColor : "rgba(220,220,220,1)",
// pointStrokeColor : "#fff",
// pointHighlightFill : "#fff",
// pointHighlightStroke : "rgba(220,220,220,1)",
// data : [randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor()]
// },
// {
// label: "My Second dataset",
// fillColor : "rgba(151,187,205,0.2)",
// strokeColor : "rgba(151,187,205,1)",
// pointColor : "rgba(151,187,205,1)",
// pointStrokeColor : "#fff",
// pointHighlightFill : "#fff",
// pointHighlightStroke : "rgba(151,187,205,1)",
// data : [randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor(),randomScalingFactor()]
// }
// ]
// }
//
// var ctx = document.getElementById("myChart");
// window.myLine = new Chart(ctx).Line(lineChartData, {
// responsive: true
// });
});
angular.module('myApp', ['ngCookies', 'mwl.calendar', 'ui.bootstrap', 'ngAnimate', 'angularMoment', 'markau.timer'])
.config(['$locationProvider', function($locationProvider) {
$locationProvider.html5Mode({
enabled: true,
requireBase: false
});
}])
.config(['calendarConfig', function(calendarConfig) {
// View all available config
console.log(calendarConfig);
// Change the month view template globally to a custom template
// calendarConfig.templates.calendarMonthView = 'path/to/custom/template.html';
// Use either moment or angular to format dates on the calendar. Default angular. Setting this will override any date formats you have already set.
calendarConfig.dateFormatter = 'moment';
// This will configure times on the day view to display in 24 hour format rather than the default of 12 hour
calendarConfig.allDateFormats.moment.date.hour = 'HH:mm';
// This will configure the day view title to be shorter
calendarConfig.allDateFormats.moment.title.day = 'ABC ddd D MMM';
// This will set the week number hover label on the month view
calendarConfig.i18nStrings.weekNumber = 'Week {week}';
// This will display all events on a month view even if they're not in the current month. Default false.
calendarConfig.displayAllMonthEvents = true;
// Make the week view more like the day view, ***with the caveat that event end times are ignored***.
calendarConfig.showTimesOnWeekView = true;
}])
.controller('MainController', [
'$scope',
'$cookies',
'$location',
'$http',
'moment',
'calendarConfig',
function($scope, $cookies, $location, $http, moment, alert, calendarConfig) {
var vm = this;
// $http.get('http://httpbin.org/ip').then(function(){console.log("success")}, function(){console.log("error")});
console.log(calendarConfig);
//These variables MUST be set as a minimum for the calendar to work
$scope.viewDate = new Date();
$scope.calendarView = 'week';
$scope.time = 300;
$scope.status = 'notstarted';
var actions = [{
label: '<i class=\'glyphicon glyphicon-pencil\'></i>',
onClick: function(args) {
alert.show('Edited', args.calendarEvent);
}
}, {
label: '<i class=\'glyphicon glyphicon-remove\'></i>',
onClick: function(args) {
alert.show('Deleted', args.calendarEvent);
}
}];
// console.log(moment().startOf('week').add(1, 'week').add(9, 'hours').toDate());
$scope.events = [];
$scope.workout = {};
$scope.workout.name = "";
$scope.workout.start = "2017-04-26T18:00";
$scope.workout.end = "2017-04-26T19:00";
$scope.cellIsOpen = true;
$scope.addEvent = function() {
$scope.events.push({
title: 'New event',
startsAt: moment().startOf('day').toDate(),
endsAt: moment().endOf('day').toDate(),
color: calendarConfig.colorTypes.important,
draggable: true,
resizable: true
});
};
$scope.eventClicked = function(event) {
if (event.type == 'suggestion') {
console.log("hello world")
$scope.workout.start = event.startsAt
$scope.workout.end = event.endsAt
} else {
alert.show('Clicked', event);
}
};
$scope.eventEdited = function(event) {
alert.show('Edited', event);
};
$scope.eventDeleted = function(event) {
alert.show('Deleted', event);
};
$scope.eventTimesChanged = function(event) {
alert.show('Dropped or resized', event);
};
$scope.toggle = function($event, field, event) {
$event.preventDefault();
$event.stopPropagation();
event[field] = !event[field];
};
$scope.timespanClicked = function(date, cell) {
if ($scope.calendarView === 'month') {
if (($scope.cellIsOpen && moment(date).startOf('day').isSame(moment($scope.viewDate).startOf('day'))) || cell.events.length === 0 || !cell.inMonth) {
$scope.cellIsOpen = false;
} else {
$scope.cellIsOpen = true;
$scope.viewDate = date;
}
} else if ($scope.calendarView === 'year') {
if (($scope.cellIsOpen && moment(date).startOf('month').isSame(moment($scope.viewDate).startOf('month'))) || cell.events.length === 0) {
$scope.cellIsOpen = false;
} else {
$scope.cellIsOpen = true;
$scope.viewDate = date;
}
}
}
console.log(vm);
var token = $cookies.get('token')
if (!token) {
var searchObject = $location.search()
if (searchObject.token) {
$cookies.put('token', searchObject.token)
token = searchObject.token
}
}
$scope.token = token
console.log(token);
today = new Date(Date.now())
next_week = new Date(Date.now() + (1000*60*60*24*7))
$http.post('http://localhost:8081/api/v1/users/get_calendar', {"token": token, "start_time": today, "end_time": next_week}).then(function(data){
$scope.googleCalendar = data;
console.log(data)
data.data.events.forEach((event) => {
$scope.events.push({
title: event.summary,
endsAt: new Date(Date.parse(event.end.dateTime)),
startsAt: new Date(Date.parse(event.start.dateTime)),
color: { // can also be calendarConfig.colorTypes.warning for shortcuts to the deprecated event types
primary: '#686868', // the primary event color (should be darker than secondary)
secondary: '#d1e8ff' // the secondary event color (should be lighter than primary)
},
draggable: true,
resizable: true,
actions: actions
})
})
}, function(){
console.log("error")
});
later_today = new Date(Date.now())
later_today.setHours(24,0,0,0)
$http.post('http://localhost:8081/api/v1/users/get_rec_workout_times', {"token": token, "start_time": today, "end_time": later_today}).then(function(data){
$scope.googleCalendar = data;
console.log(data);
data.data.suggestions.forEach((time) => {
$scope.events.push({
title: "Possible workout time!",
startsAt: new Date(Date.parse(time)),
endsAt: new Date(Date.parse(time)+(1000*60*90)),
color: { // can also be calendarConfig.colorTypes.warning for shortcuts to the deprecated event types
primary: '#686868', // the primary event color (should be darker than secondary)
secondary: '#e2a1a1' // the secondary event color (should be lighter than primary)
},
draggable: true,
resizable: true,
type: 'suggestion'
})
})
}, function(){
console.log("error")
});
$http.post('http://localhost:8081/api/v1/users/get_user', {"token": token}).then(function(data){$scope.username = data.data.name; $scope.userdata = data.data; console.log(data); console.log(data.data.name);}, function(){console.log("error")});
$http.post('http://localhost:8081/api/v1/users/get_workouts', {"token": token}).then(function(data){$scope.workouts = data.data; console.log(data);}, function(){console.log("error")});
$scope.login = function() {
window.location.href = "http://localhost:8081/api/v1/auth/facebook";
}
$scope.logout = function() {
$cookies.remove('token')
window.location.href = "http://localhost:8081/";
}
$scope.logged_in = function() {
return (typeof $scope.token != 'undefined')
}
$scope.import_cal = function() {
window.location.href = "http://localhost:8081/api/v1/users/auth_google?state=" + $scope.token
}
$scope.get_rec_workouts = function() {
console.log("yo")
$http.post('http://localhost:8081/api/v1/users/get_routines', {"token": token, "filters": { "tags": ['legs', 'core'] }}).then(function(data){
console.log(data.data[0]._id)
$scope.workout.name = data.data[0]._id
}, function(){
console.log("error")
});
}
$scope.workoutSubmit = function(){
console.log($scope.workout.start);
$http.post('http://localhost:8081/api/v1/users/schedule_workout', {"token": token, "date": $scope.workout.start, "workout": $scope.workout.name}).then(function(data){
console.log(data)
}, function(){
console.log("error")
});
}
$scope.get_rec_workouts()
// console.log($username);
// console.log($password);
}
]);
// Generate dynamic page
var dynamicPageIndex = 0;
function createContentPage() {
mainView.router.loadContent(
'<!-- Top Navbar-->' +
'<div class="navbar">' +
' <div class="navbar-inner">' +
' <div class="left"><a href="#" class="back link"><i class="icon icon-back"></i><span>Back</span></a></div>' +
' <div class="center sliding">Dynamic Page ' + (++dynamicPageIndex) + '</div>' +
' </div>' +
'</div>' +
'<div class="pages">' +
' <!-- Page, data-page contains page name-->' +
' <div data-page="dynamic-pages" class="page">' +
' <!-- Scrollable page content-->' +
' <div class="page-content">' +
' <div class="content-block">' +
' <div class="content-block-inner">' +
' <p>Here is a dynamic page created on ' + new Date() + ' !</p>' +
' <p>Go <a href="#" class="back">back</a> or go to <a href="schedule.html">Services</a>.</p>' +
' </div>' +
' </div>' +
' </div>' +
' </div>' +
'</div>'
);
return;
};
<file_sep>/models/exercise.js
var mongoose = require('mongoose');
var ExerciseSchema = new mongoose.Schema({
name: String,
description: String,
gym_area: String,
type: String,
popularity: Number,
creator: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
});
module.exports = mongoose.model('Exercise', ExerciseSchema);<file_sep>/html/js/corec.js
$(function() {
var user_token
function initUser() {
user_token = $.cookie("token")
if (!user_token) {
window.location.href = "http://localhost:8081/"
return false
}
return true
}
//Avoid using api calls too frequently not sure what if any rate limits tehre are, we might be able to find out, caching would be best probaly
/* 'https://www.purdue.edu/DRSFacilityUsageAPI/locations' JSON response looks like:
[{
"LocationId": "b0e732b7-a89b-42e7-9465-03ba48769a62", #unique to each area
"LocationName": "Field 2", #also unique
"ZoneId": "fdbd39c0-689b-4199-b366-54a2577ef35f", #zone area belongs in, non unique - group by this
"ZoneName": "TREC", #goes with ZoneId above, probably unique (no way to get zones alone from API?)
"Capacity": 50,
"Active": true, #looks like inverse of closed might just be wether people are here or not
"Closed": false, #key off of this for hours
"LastUpdatedTime": "2017-02-21T23:30:41.393", #time date stamp of last update
"ZoneAndLocationName": null #not sure what this is, always seems to be null, ignore I guess
},{},{}...]
*/
function coRecHours(){
    // Fetch the "weeklytrends" payload for the whole CoRec. The success
    // handler is currently empty (the dynamicAlert call is commented out),
    // so this request has no visible effect yet.
    var xhr = $.ajax({type: 'POST',
        url: 'http://localhost:8081/api/v1/corec/get_usage',
        data: JSON.stringify({ usage_type: "weeklytrends", token: user_token }),
        contentType: 'application/json',
        success: function(data){
            //dynamicAlert(data)
        }})
}
// Thursday 5:30AM–12AM 5:30, 0
// Friday 5:30AM–12AM 5:30, 0
// Saturday 8AM–12AM 8, 0
// Sunday 10AM–12AM 10, 0
// Monday 5:30AM–12AM 5:30, 0
// Tuesday 5:30AM–12AM 5:30, 0
// Wednesday 5:30AM–12AM 5:30, 0
function dynamicAlert(){
    // Append an open/closed banner to #dynamic-alert.
    // NOTE(review): the condition is short-circuited with `|| true`, so the
    // "currently open" banner is always shown; the hour check (which
    // compares a number against the string "12") never decides anything.
    // A "closed" variant of the banner was removed from an earlier version.
    var alert = document.getElementById("dynamic-alert");
    if(moment().get('hour') < "12" || true){
        $('#dynamic-alert').append("<div class='alert alert-success alert-dismissible' role='alert'><button type='button' class='close' data-dismiss='alert' aria-label='Close'><span aria-hidden='true'>×</span></button><strong>The CoRec is currently open!</strong> It closes tonight at Midnight</div>")
    }
}
function getKeyArray(hash) {
    // Collect the enumerable property names of `hash`, in for-in order.
    var keys = [];
    for (var key in hash) {
        keys.push(key);
    }
    return keys;
}
function getValueArray(hash) {
    // Collect the enumerable property values of `hash`, in for-in order.
    var values = [];
    for (var key in hash) {
        values.push(hash[key]);
    }
    return values;
}
function initLastUpdatedTime(locationid, el) {
    // Fetch the "lastupdatedtime" payload and render it via
    // viewLastUpdatedTime. When `locationid` is omitted the API returns the
    // most recently updated location overall. `el` (optional) is the jQuery
    // element the result is rendered into.
    // BUG FIX: the request payload referenced a mangled identifier
    // ("<PASSWORD>_token" — a ReferenceError at runtime); it now uses the
    // `user_token` closure variable like every other request in this file.
    var data = { usage_type: "lastupdatedtime", token: user_token }
    if(typeof locationid != 'undefined') {
        data["location_id"] = locationid;
    }
    var xhr = $.ajax({type: 'POST',
        url: 'http://localhost:8081/api/v1/corec/get_usage',
        data: JSON.stringify(data),
        contentType: 'application/json',
        success: function(data){
            viewLastUpdatedTime(data, el);
        }})
}
function viewLastUpdatedTime(data, el) {
    // Write "Most recently updated at h:mm a" into `el` (defaults to
    // #lastupdatedtime) using the payload's LastUpdatedTime field.
    if(typeof data == 'undefined') {
        return null;
    }
    if(typeof el == 'undefined') {
        el = $("#lastupdatedtime");
    }
    // FIX: removed an unused local (`var d = moment(...)`) that duplicated
    // the expression below.
    el.html("Most recently updated at " + moment(data.LastUpdatedTime).format('h:mm a'));
}
function initCurrentActivityCharts() {
    // Fetch the "currentactivity" (headcounts) and "locations" (metadata)
    // payloads in parallel, then render the per-area doughnut charts via
    // viewCurrentActivityCharts. A spinner is shown in #currentactivity
    // while both requests are in flight.
    /* 'https://www.purdue.edu/DRSFacilityUsageAPI/currentactivity/' JSON response looks like:
    [{
        "LocationId": "61b3abc1-bb87-413b-b933-827bc6d58e0f",
        "LocationName": "Colby Strength",
        "ZoneId": "19d3ae76-7e6b-4ef1-8fcf-27b4537d6cfc",
        "ZoneName": "Basement",
        "Capacity": 100,
        "Headcount": 27,
        "EntryDate": "2017-02-14T07:13:44.337"
    },{},{}...]
    Seems like it doesn't matter whether a LocationId is passed to this;
    data returns for all locations... need to investigate this more.
    */
    var spinner = new Spinner({
        length: 5
    }).spin();
    $('#currentactivity').append(spinner.el);
    var arg1 = "currentactivity"
    var arg2 = "locations"
    var currentactivityData, locationData;
    // Fire both requests concurrently; rendering needs data from both.
    $.when(
        $.ajax({type: 'POST',
            url: 'http://localhost:8081/api/v1/corec/get_usage',
            data: JSON.stringify({ usage_type: arg1, token: user_token }),
            contentType: 'application/json',
            success: function(data){
                currentactivityData = data;
            }}),
        $.ajax({type: 'POST',
            url: 'http://localhost:8081/api/v1/corec/get_usage',
            data: JSON.stringify({ usage_type: arg2, token: user_token }),
            contentType: 'application/json',
            success: function(data){
                locationData = data;
            }})
    ).then(function() {
        // Only render when both requests delivered data.
        if (currentactivityData && locationData) {
            viewCurrentActivityCharts(currentactivityData, locationData);
            spinner.stop();
        }
        else {
            // Request for web data didn't work; currently unhandled.
        }
    });
}
function getLocationData(){
    // Start a request for the full location list and return the jqXHR.
    // BUG FIX: the request payload referenced a mangled identifier
    // ("<PASSWORD>" — a ReferenceError at runtime); it now sends user_token.
    // NOTE(review): the `return data` in the success callback does not
    // propagate anywhere — callers receive the jqXHR and must attach their
    // own .done() handler to get the payload.
    var xhr = $.ajax({type: 'POST',
        url: 'http://localhost:8081/api/v1/corec/get_usage',
        data: JSON.stringify({ usage_type: "locations", token: user_token }),
        contentType: 'application/json',
        success: function(data){
            return data // no-op; see note above
        }});
    return xhr;
}
function viewCurrentActivityCharts(data, data2) {
    // Render one doughnut chart per location (occupied vs. available
    // capacity), grouped into one Bootstrap tab per zone.
    //   data:  "currentactivity" payload — Headcount/Capacity per location.
    //   data2: "locations" payload — Closed flag and LastUpdatedTime.
    // Locations at >= 60% of capacity also appear in a "Hot Spots" tab.
    if(typeof data == 'undefined') {
        return null;
    }
    var closedZones = {};
    var entrydates = {};
    var capacities = {};
    var headcounts = {};
    var locationnames = {};
    var zones = {};
    var zonenames = {};
    var lastupdatedtimes = {};
    var tabcount = 0;
    var lastZoneId = "";
    var hotTab = false;
    // Index the locations payload by LocationId.
    for (var area in data2) {
        closedZones[data2[area].LocationId] = data2[area].Closed;
        lastupdatedtimes[data2[area].LocationId] = data2[area].LastUpdatedTime;
    }
    for (var area in data) {
        // The activity payload arrives grouped by zone; open a new tab
        // whenever the ZoneId changes.
        if (lastZoneId != data[area].ZoneId) {
            lastZoneId = data[area].ZoneId;
            tabcount++;
            var tab = $(
                "<li><a href='#tabs-" + tabcount + "' role='tab' data-toggle='tab'>" + data[area].ZoneName + "</a></li>"
            );
            tab.appendTo("#tabs");
            var tabsection = $(
                "<div id='tabs-" + tabcount + "' class='tab-pane'></div>"
            );
            tabsection.appendTo("#tab-panes");
        }
        // Null-safe numeric conversion of capacity and headcount.
        if(data[area].Capacity == null){
            capacities[data[area].LocationId] = 0;
        }
        else {
            capacities[data[area].LocationId] = parseInt(data[area].Capacity);
        }
        if(data[area].Headcount == null){
            headcounts[data[area].LocationId] = 0;
        }
        else {
            headcounts[data[area].LocationId] = parseInt(data[area].Headcount);
        }
        locationnames[data[area].LocationId] = data[area].LocationName;
        zones[data[area].LocationId] = tabcount;
        zonenames[data[area].LocationId] = data[area].ZoneName;
        entrydates[data[area].LocationId] = data[area].EntryDate;
        // Create the "Hot Spots" tab the first time a busy location is seen.
        if (headcounts[data[area].LocationId] / capacities[data[area].LocationId] >= 0.6) {
            if (!hotTab) {
                var tab = $(
                    "<li><a href='#tabs-0' role='tab' data-toggle='tab'>Hot Spots</a></li>"
                );
                tab.prependTo("#tabs");
                var tabsection = $(
                    "<div id='tabs-0' class='tab-pane'></div>"
                );
                tabsection.prependTo("#tab-panes");
                hotTab = true;
            }
        }
    }
    var chartdata = [];
    var freeSpace;  // FIX: was assigned without declaration (implicit global)
    for (var key in headcounts) {
        // Available capacity, clamped to zero when over capacity.
        if(capacities[key] < headcounts[key]){
            freeSpace = 0;
        }
        else {
            freeSpace = capacities[key] - headcounts[key];
        }
        // Chart color reflects how busy the location is.
        var color;
        if(headcounts[key] / capacities[key] >= 0.8) {
            color = "red";
        } else if(headcounts[key] / capacities[key] >= 0.6) {
            color = "orange";
        } else {
            color = "green";
        }
        var data2 = {
            labels: [
                "Occupied",
                "Available"],
            datasets: [{
                data: [headcounts[key], freeSpace],
                backgroundColor: [color]
            }]
        };
        var HTML = "<div class='chartitem col-xs-12'>";
        HTML += "<div class='row'>";
        HTML += "<a onclick='app.initTrendsCharts(\"" + key + "\");'>";
        HTML += "<h2 class='chartcaption capitalized col-xs-6'><span class='chartcaption-title'>" + locationnames[key] + "</span>";
        HTML += "<div class='chartcaption-data'>";
        if (closedZones[key] == false && headcounts[key] != '0') {
            HTML += headcounts[key];
            HTML += " / "
            HTML += capacities[key];
        }
        if(closedZones[key]) {
            HTML += "<span class='label label-danger'>Closed</span>";
        }
        if(headcounts[key] == '0' && closedZones[key] == false) {
            HTML += "<div class='chartcaption-data'><span class='label label-success'>Empty</span></div>";
        }
        HTML += "</div>";
        HTML += "</h2>";
        HTML += "<div class='col-xs-6 chart' id=" + key + ">";
        HTML += "<canvas id='chart" + key + "' width='100' height='100'></canvas>";
        HTML += "</div>";
        HTML += "</a>";
        HTML += "</div>";
        // BUG FIX: a stray semicolon after .format(...) previously ended the
        // statement, turning + "</div>" into a discarded expression, so the
        // "Last updated" div was never closed. The tag is now appended.
        if(entrydates[key] != null){
            HTML += "<div class='smaller'>Last updated " + moment(entrydates[key]).format('h:mm a') + "</div>";
        }
        else {
            HTML += "<div class='smaller'>Last updated " + moment(lastupdatedtimes[key]).format('h:mm a') + "</div>";
        }
        HTML += "</div>";
        var chartitem = $(HTML);
        chartitem.appendTo("#tabs-" + zones[key]);
        $("#tabs a:first").tab('show');
        var el = document.getElementById("chart" + key);
        var ctx = el.getContext("2d");
        var chart = new Chart(ctx, {
            type: 'doughnut',
            data: data2,
            options: {
                cutoutPercentage: 80,
                legend: {display: false},
                tooltips: {displayColors: false}
            }
        });
        // Busy locations get a second copy of their chart in "Hot Spots".
        if (hotTab && headcounts[key] / capacities[key] >= 0.6) {
            var chartitemHot = $(
                "<div class='chartitem col-xs-12'>" +
                "<div class='row'>" +
                "<a onclick='app.initTrendsCharts(\"" + key + "\");'>" +
                "<h2 class='chartcaption capitalized col-xs-6'><span class='chartcaption-title'>" + locationnames[key] + "</span>" +
                "<div class='chartcaption-data'>" + zonenames[key] + "</div>" +
                "<div class='chartcaption-data'>" + headcounts[key] + " / " + capacities[key] + "</div>" +
                "</h2>" +
                "<div class='col-xs-6 chart'>" +
                "<canvas id='chart" + key + "-hot' width='100' height='100'></canvas>" +
                "</div>" +
                "</a>" +
                "</div>" +
                "</div>");
            chartitemHot.appendTo("#tabs-0");
            var elHot = document.getElementById("chart" + key + "-hot");
            var ctxHot = elHot.getContext("2d");
            var chartHot = new Chart(ctxHot, {
                type: 'doughnut',
                data: data2,
                options: {
                    cutoutPercentage: 80,
                    legend: {display: false},
                    tooltips: {displayColors: false}
                }
            });
        }
    }
}
function initMonthlyTrendsChart(locationid) {
    // Fetch the "monthlytrends" payload (optionally for one location) and
    // render the monthly line chart via viewMonthlyTrendsChart.
    // BUG FIX: the request payload referenced a mangled identifier
    // ("<PASSWORD>_token" — a ReferenceError at runtime); it now uses
    // the `user_token` closure variable.
    var data = { usage_type: "monthlytrends", token: user_token }
    if(typeof locationid != 'undefined') {
        data["location_id"] = locationid
    }
    var xhr = $.ajax({type: 'POST',
        url: 'http://localhost:8081/api/v1/corec/get_usage',
        data: JSON.stringify(data),
        contentType: 'application/json',
        success: function(data){
            viewMonthlyTrendsChart(data);
        }});
}
function viewMonthlyTrendsChart(data) {
    // Render a line chart of monthly attendance into #monthlychart.
    // `data` is the "monthlytrends" payload: one entry per month carrying
    // MonthName, Headcount and Capacity fields.
    if(typeof data == 'undefined') {
        return null;
    }
    // Month name -> numeric headcount / capacity.
    var headcounts = {};
    var capacities = {};
    var counter = 0;
    for(var stat in data) {
        headcounts[data[stat].MonthName] = parseInt(data[stat].Headcount);
        capacities[data[stat].MonthName] = parseInt(data[stat].Capacity);
        counter++;
    }
    var chartdata = {};
    var options = {};
    // Reversed — presumably the API returns newest month first; confirm.
    var labels = getKeyArray(headcounts).reverse();
    var datapoints = getValueArray(headcounts).reverse();
    // The y axis is capped at the largest capacity present in the data.
    var maxCapacity = Math.max.apply(null, getValueArray(capacities));
    var datasets = [
        {
            label: "Attendance",
            backgroundColor: "rgba(54, 162, 235, 0.2)",
            borderColor: "rgba(54, 162, 235, 1)",
            pointBorderColor: "rgba(54, 162, 235, 1)",
            pointBackgroundColor: "rgba(54, 162, 235, 1)",
            pointHoverBackgroundColor: "rgba(54, 162, 235, 1)",
            pointHoverBorderColor: "rgba(54, 162, 235, 1)",
            data: datapoints
        }
    ];
    // Replace any previous canvas so repeated calls don't stack charts.
    el = document.getElementById("monthlychart");
    while (el.firstChild) {
        el.removeChild(el.firstChild);
    }
    var canvas = document.createElement("canvas");
    canvas.width = el.getAttribute("width");
    canvas.height = el.getAttribute("height");
    el.appendChild(canvas);
    var ctx = canvas.getContext("2d");
    var chart = new Chart(ctx, {
        type: 'line',
        data: {
            labels: labels,
            datasets: datasets
        },
        options: {
            legend: {display: false},
            tooltips: {displayColors: false},
            scales: {
                yAxes: [{
                    ticks: {
                        min: 0,
                        max: maxCapacity
                    }
                }]
            }
        }
    });
}
function updateMonthlyTrendsChart(locationid) {
    // Resize the monthly chart canvas to its container's width (fixed
    // height), then redraw it for the given location.
    var chartEl = document.getElementById("monthlychart");
    if (!chartEl) {
        return;
    }
    var $chartEl = $(chartEl);
    $chartEl.attr('width', $chartEl.parent().width()); //max width
    $chartEl.attr('height', 250); //max height
    initMonthlyTrendsChart(locationid);
}
function updateWeekTrendsChart(locationid) {
    // Size #weeklychart to its container, then fetch the "weeklytrends" and
    // "locations" payloads for `locationid` in parallel and render them via
    // viewWeekTrendsChart. A spinner is shown in #modal-body while loading.
    // BUG FIX: both request payloads referenced a mangled identifier
    // ("<PASSWORD>_token" — a ReferenceError at runtime); they now use the
    // `user_token` closure variable like every other request in this file.
    // NOTE(review): averages downstream ignore hours with zero headcount
    // and hours when the facility is closed are not masked out — the
    // locations API only reports the *current* Closed state, not history.
    var data1 = { usage_type: "weeklytrends", token: user_token }
    if(typeof locationid != 'undefined') {
        data1["location_id"] = locationid
    }
    var data2 = { usage_type: "locations", token: user_token }
    if(typeof locationid != 'undefined') {
        data2["location_id"] = locationid
    }
    var chart = document.getElementById("weeklychart");
    if(chart) {
        var containerWidth = $(chart).parent().width();
        $(chart).attr('width', containerWidth); //max width
        $(chart).attr('height', 250 ); //max height
        var spinner = new Spinner({
            length: 5
        }).spin();
        $('#modal-body').append(spinner.el);
        var weeklyData, locationData;
        $.when(
            $.ajax({type: 'POST',
                url: 'http://localhost:8081/api/v1/corec/get_usage',
                data: JSON.stringify(data1),
                contentType: 'application/json',
                success: function(data){
                    weeklyData = data;
                }}),
            $.ajax({type: 'POST',
                url: 'http://localhost:8081/api/v1/corec/get_usage',
                data: JSON.stringify(data2),
                contentType: 'application/json',
                success: function(data){
                    locationData = data;
                }})
        ).then(function() {
            // Only render when both requests delivered data.
            if (weeklyData && locationData) {
                viewWeekTrendsChart(weeklyData, locationData);
                spinner.stop();
            }
            else {
                // One of the requests failed; currently unhandled.
            }
        });
    }
}
function initWeeklyTrendsChart(locationid, x, y, width, height) {
    // Fetch the "weeklytrends" payload and render the day/hour heat chart
    // via viewWeeklyTrendsChart at the given position and size.
    // BUG FIX: the request payload referenced a mangled identifier
    // ("<PASSWORD>_token" — a ReferenceError at runtime); it now uses the
    // `user_token` closure variable.
    var data = { usage_type: "weeklytrends", token: user_token }
    if(typeof locationid != 'undefined') {
        data["location_id"] = locationid;
    }
    var xhr = $.ajax({type: 'POST',
        url: 'http://localhost:8081/api/v1/corec/get_usage',
        data: JSON.stringify(data),
        contentType: 'application/json',
        success: function(data){
            viewWeeklyTrendsChart(data, x, y, width, height);
        }})
    // TODO: also fetch the location's Active/Closed state ("locations"
    // usage_type) so hours when the area is closed can be masked out —
    // that payload is real-time only, so history would need to be stored.
}
function viewWeekTrendsChart(data, locationdata) {
    // Render a bar chart into #weeklychart showing the average headcount
    // per day of the week for one location.
    //   data:         "weeklytrends" payload — Headcount per DayOfWeek/Hour.
    //   locationdata: "locations" payload for the location (Capacity caps
    //                 the y axis).
    if(typeof data == 'undefined') {
        return null;
    }
    // Flat (7 days x 24 hours) headcount table, zero-initialized; index is
    // day * 24 + hour.
    var headcounts = [];
    for(var i = 0; i < 7; i++) {
        for(var j = 0; j < 24; j++) {
            headcounts[i * 24 + j] = 0;
        }
    }
    var time;
    for(var stat in data) {
        headcounts[parseInt(data[stat].DayOfWeek) * 24 + parseInt(data[stat].Hour)] = parseInt(data[stat].Headcount);
    }
    // Average each day over its non-zero hours only — hours with no
    // recorded attendance are excluded from the mean (see the TODO in the
    // original notes about closed hours skewing averages).
    var averages = [];
    for(var k = 0; k < 7; k++){
        var total = 0;
        var count = 0;
        for(var l = 0; l < 24; l++){
            if(headcounts[k * 24 + l] != 0){
                total += headcounts[k * 24 + l];
                count++;
            }
        }
        averages[k] = Math.round(total / count);
    }
    // Replace any previous canvas so repeated calls don't stack charts.
    var el = document.getElementById("weeklychart");
    while (el.firstChild) {
        el.removeChild(el.firstChild);
    }
    var canvas = document.createElement("canvas");
    canvas.width = el.getAttribute("width");
    canvas.height = el.getAttribute("height");
    el.appendChild(canvas);
    ctx = canvas.getContext("2d");
    chart = new Chart(ctx, {
        type: 'bar',
        data: {
            labels: ["S", "M", "T", "W", "Th", "F", "S"],
            datasets: [
                {
                    label: "Average",
                    // One color per weekday.
                    backgroundColor: [
                        'rgba(255, 99, 132, 0.2)',
                        'rgba(54, 162, 235, 0.2)',
                        'rgba(255, 206, 86, 0.2)',
                        'rgba(75, 192, 192, 0.2)',
                        'rgba(153, 102, 255, 0.2)',
                        'rgba(255, 159, 64, 0.2)',
                        'rgba(255, 98, 205,0.2)'
                    ],
                    borderColor: [
                        'rgba(255,99,132,1)',
                        'rgba(54, 162, 235, 1)',
                        'rgba(255, 206, 86, 1)',
                        'rgba(75, 192, 192, 1)',
                        'rgba(153, 102, 255, 1)',
                        'rgba(255, 159, 64, 1)',
                        'rgba(255, 98, 205,1)'
                    ],
                    borderWidth: 1,
                    data: averages,
                }
            ]
        },
        options: {
            legend: {display: false},
            tooltips: {displayColors: false},
            scales: {
                yAxes: [{
                    ticks: {
                        min: 0,
                        max: locationdata.Capacity
                    }
                }]
            }
        }
    });
}
function viewWeeklyTrendsChart(data, x, y, width, height) {
    // Render a day-of-week x hour-of-day heat (dot) chart of headcounts
    // into #hourlychart using Raphael (chosen over Chart.js because it
    // ships a dot/heat chart; demo:
    // http://dmitrybaranovskiy.github.io/raphael/github/dots.html).
    //   data: "weeklytrends" payload — Headcount per DayOfWeek/Hour.
    //   x, y, width, height: position/size of the chart on the paper.
    // CLEANUP: removed a large commented-out Chart.js bar-chart block, the
    // `averages` computation that only fed it, and an unused `time` local;
    // `dotChart` is now properly declared instead of an implicit global.
    if(typeof data == 'undefined') {
        return null;
    }
    // One cell per (day, hour): valuesx = day (1-7), valuesy = hour (0-23),
    // headcounts zero-initialized and then filled from the payload.
    var valuesx = [];
    var valuesy = [];
    var headcounts = [];
    for(var i = 0; i < 7; i++) {
        for(var j = 0; j < 24; j++) {
            valuesx[i * 24 + j] = i + 1;
            valuesy[i * 24 + j] = j;
            headcounts[i * 24 + j] = 0;
        }
    }
    for(var stat in data) {
        headcounts[parseInt(data[stat].DayOfWeek) * 24 + parseInt(data[stat].Hour)] = parseInt(data[stat].Headcount);
    }
    // Clear any previous chart.
    var weekly = document.getElementById("hourlychart");
    while (weekly.firstChild) {
        weekly.removeChild(weekly.firstChild);
    }
    var axisxlabels = ["S", "M", "T", "W", "Th", "F", "S"];
    var axisylabels = ["Midnight", "1 am", "2 am", "3 am", "4 am", "5 am", "6 am", "7 am", "8 am", "9 am", "10 am", "11 am", "Noon", "1 pm", "2 pm", "3 pm", "4 pm", "5 pm", "6 pm", "7 pm", "8 pm", "9 pm", "10 pm", "11 pm"];
    // Switch from hours ascending to descending so early hours render at
    // the bottom; each day's headcount slice is reversed to match.
    axisylabels.reverse();
    var headcounts2 = [];
    for(var i = 0; i < 7; i++) {
        headcounts2 = headcounts2.concat(headcounts.slice(i * 24, (i + 1) * 24).reverse());
    }
    headcounts = headcounts2;
    var paper = Raphael("hourlychart", width, height);
    var options = { heat: true, max: 10, axisxlabels: axisxlabels, axisylabels: axisylabels, axis: "1 0 1 1", axisxstep: 6, axisystep: 23 };
    var dotChart = paper.dotchart(x, y, width, height, valuesx, valuesy, headcounts, options).hover(function () {
        // Show the cell's value in a tag on hover (non-zero cells only).
        dotChart.covers = paper.set();
        if(this.value != 0){
            dotChart.covers.push(paper.tag(this.x, this.y, this.value , 0, this.r + 2).insertBefore(this));
        }
    }, function () {
        dotChart.covers.remove();
    });
}
function updateWeeklyTrendsChart(locationid) {
    // Clear the hourly heat chart, pin its SVG to a fixed size, and kick
    // off a redraw for the given location.
    var container = $("#hourlychart").first();
    if (!container) {
        return;
    }
    container.empty();
    var svg = $("#hourlychart svg").first();
    svg.attr("width", 348);
    svg.attr("height", 487.2);
    initWeeklyTrendsChart(locationid, 10, 0, 348, 487.2);
}
function initTrendsCharts(locationid) {
    // Open the trends modal for one location and draw its charts. The
    // active flag and location id let updateTrendsCharts refresh the modal
    // while it is open (cleared again by the hide.bs.modal handler below).
    app.trendsChartsActive = true;
    app.trendsChartsLocationID = locationid;
    $('#trends').css('display', 'block');
    updateTrendsCharts(locationid);
    $('#trends').modal();
}
function updateTrendsCharts(locationid) {
    // Refresh everything inside the trends modal: the title (from the
    // "locations" payload's LocationName), the hourly heat chart, the
    // weekly averages chart, and the last-updated stamp. No-op unless the
    // modal is currently open (app.trendsChartsActive).
    if(app.trendsChartsActive) {
        var data = { usage_type: "locations", token: user_token }
        if(typeof locationid != 'undefined') {
            data["location_id"] = locationid
        }
        var xhr = $.ajax({type: 'POST',
            url: 'http://localhost:8081/api/v1/corec/get_usage',
            data: JSON.stringify(data),
            contentType: 'application/json',
            success: function(data){
                $("#locationname").html(data.LocationName);
            }})
        // updateMonthlyTrendsChart(locationid);
        updateWeeklyTrendsChart(locationid);
        updateWeekTrendsChart(locationid);
        initLastUpdatedTime(locationid, $("#lastupdatedtimetrends"));
    }
}
// Reset the trends state when the modal is dismissed so that
// updateTrendsCharts stops touching the DOM.
$('#trends').on('hide.bs.modal', function () {
    app.trendsChartsActive = false;
    app.trendsChartsLocationID = "";
});
// Public namespace: expose the entry points on window.app so inline
// handlers (e.g. the onclick attributes built above) and other scripts
// can reach them.
var app = window.app || {};
window.app = app;
app.trendsChartsActive = false;
app.trendsChartsLocationID = "";
app.initUser = initUser;
app.initLastUpdatedTime = initLastUpdatedTime;
app.initCurrentActivityCharts = initCurrentActivityCharts;
app.initTrendsCharts = initTrendsCharts;
app.updateTrendsCharts = updateTrendsCharts;
app.dynamicAlert = dynamicAlert;
});
<file_sep>/api/corec.js
var https = require('https')
var mongoose = require('mongoose');
var User = require('../models/user');
var Workout = require('../models/workout');
var Exercise = require('../models/exercise');
var History = require('../models/history');
var Routine = require('../models/routine');

// Namespace object for all CoRec-related helpers; exported at file end.
var corec = new Object()

// Maps a routine tag to CoRec location GUID(s) used to predict crowding
// in get_routines.
// NOTE(review): the "chest" and "arms" entries are single comma-joined
// strings rather than two separate GUID elements — confirm the upstream
// usage API accepts comma-separated ids, otherwise these should be split.
var tag_to_loc = {
    "legs": ["45f053e9-67ed-48f2-bcf6-c03b86f1e261"],
    "core": ["b100914b-6a26-4779-9164-b893cd05d5e7"],
    "chest": ["61b3abc1-bb87-413b-b933-827bc6d58e0f,45f053e9-67ed-48f2-bcf6-c03b86f1e261"],
    "arms": ["61b3abc1-bb87-413b-b933-827bc6d58e0f,45f053e9-67ed-48f2-bcf6-c03b86f1e261"],
    "cardio": ["e9d35ffa-e7ff-4ba5-8f27-b4a12df95012", "f77a2aee-dd9e-4cca-ac42-a475012e85cc"],
    "test": ["e9d35ffa-e7ff-4ba5-8f27-b4a12df95012"]
}
corec.get_usage = function(type, loc_id, cb) {
    // Proxy a GET to Purdue's facility-usage API and hand the parsed JSON
    // body to `cb`. `loc_id` is optional and appended as a path segment.
    var url = 'https://www.purdue.edu/DRSFacilityUsageAPI/' + type
    if (loc_id) {
        url += '/' + loc_id
    }
    https.get(url, (res) => {
        res.setEncoding('utf8');
        // Accumulate the chunked body before parsing.
        var body = ''
        res.on('data', (chunk) => {
            body += chunk
        });
        res.on('end', () => {
            cb(JSON.parse(body));
        });
    }).on('error', (e) => { console.log("error: "); console.log(e); });
}
corec.create_routine = function(user_id, routine, cb) {
    // Create a Routine owned by the user whose Facebook id is `user_id`.
    // `routine`: { name, tags, exercises: [ObjectId strings] }.
    // cb(success, routine_id?) — cb(false) on any failure.
    User.findOne({ 'fb_id': user_id}, (err, user) => {
        // FIX: also guard the "no such user" case — findOne yields a null
        // doc without an error, which previously threw a TypeError and
        // left the callback uncalled.
        if (err || !user) {
            cb(false)
            return
        }
        var ro = Routine()
        ro.creator = user._id
        ro.name = routine.name
        ro.tags = routine.tags
        // Convert the exercise id strings into ObjectIds.
        ro.exercises = routine.exercises.map((id) => mongoose.Types.ObjectId(id))
        ro.save((err, saved) => {
            if (err) {
                cb(false)
            } else {
                cb(true, saved._id)
            }
        });
    });
}
corec.schedule_workout = function(user_id, routine, date, cb) {
    // Create a Workout for routine id `routine` on `date` and attach it to
    // the user's workout list. cb(success, workout_id?).
    User.findOne({ 'fb_id': user_id }, (err, user) => {
        // FIX: also guard the "no such user" case (previously a TypeError
        // that left the callback uncalled).
        if (err || !user) {
            cb(false)
            return
        }
        var wo = Workout()
        wo.routine = mongoose.Types.ObjectId(routine)
        wo.completed_on = new Date(date)
        wo.save((err, saved) => {
            if (err) {
                cb(false)
            } else {
                // NOTE(review): pushes a one-element array, not the id
                // itself — confirm mongoose casting makes this equivalent.
                user.workouts.push([saved._id])
                user.save((err) => {
                    if (err) {
                        console.log(err)
                        cb(false)
                    } else {
                        cb(true, saved._id)
                    }
                });
            }
        });
    });
}
corec.get_workouts = function(user_id, cb) {
    // Fetch the user's workouts with each workout's routine populated.
    // cb receives the workouts array, or [] when the lookup fails or the
    // user does not exist (FIX: a missing user previously threw a
    // TypeError on user.workouts).
    User.findOne({ 'fb_id': user_id })
        .populate({ path: 'workouts', populate: { path: 'routine' } })
        .exec((err, user) => {
            if (err || !user) {
                cb([])
            } else {
                cb(user.workouts)
            }
        });
}
corec.share_workout = function(user_id, wo_id, cb) {
    // Build a small shareable summary { name, date, user_name } for workout
    // `wo_id`. Note: `user_id` here is the Mongo _id, not the fb_id used by
    // the other helpers. cb(null) on any failure (FIX: missing documents
    // previously caused a TypeError instead of a null callback).
    Workout.findOne({ '_id': wo_id })
        .populate({path: 'routine'})
        .exec((err, wo) => {
            if (err || !wo) {
                cb(null)
                return
            }
            User.findOne({ '_id': user_id }, (err, user) => {
                if (err || !user) {
                    cb(null)
                    return
                }
                cb({
                    name: wo.routine.name,
                    date: wo.completed_on,
                    user_name: user.name
                })
            })
        })
}
corec.create_exercise = function(user_id, exercise, cb) {
    // Create an Exercise owned by the user whose Facebook id is `user_id`.
    // `exercise`: { name, description, area, type }. cb(success, id?).
    User.findOne({ 'fb_id': user_id}, (err, user) => {
        // FIX: also guard the "no such user" case (previously a TypeError
        // that left the callback uncalled).
        if (err || !user) {
            cb(false)
            return
        }
        var ex = Exercise()
        ex.name = exercise.name
        ex.description = exercise.description
        ex.gym_area = exercise.area
        ex.type = exercise.type
        ex.popularity = 0   // new exercises start with zero popularity
        ex.creator = user._id
        ex.save((err, saved) => {
            if (err) {
                cb(false)
            } else {
                cb(true, saved._id)
            }
        })
    });
}
corec.get_exercises = function(user_id, filters, cb) {
    // List exercises, optionally filtered. cb receives the matching
    // exercises, or [] on error. `user_id` is currently unused.
    var query = {}
    if (filters) {
        if ('types' in filters) {
            // NOTE(review): this queries a `tags` path, but ExerciseSchema
            // defines `type` (and no `tags`) — confirm which field the
            // 'types' filter is actually meant to match.
            query['tags'] = { $in: filters['types'] }
        }
    }
    Exercise.find(query, (err, exercises) => {
        if (err) {
            cb([])
        } else {
            cb(exercises)
        }
    });
}
corec.get_routines = function(user_id, filters, cb) {
    // List routines matching `filters`:
    //   filters.tags: keep routines with any of the given tags;
    //   filters.rec:  recommend — match the tags of the user's three most
    //                 recent workouts;
    //   filters.date: with rec, drop routines whose primary tag maps to a
    //                 gym area predicted to be > 60% full at that time.
    // cb receives the routines array ([] on query error).
    var query = {}
    var waiting = false
    var finish = (query, date) => {
        Routine.find(query, (err, routines) => {
            if (err) {
                cb([])
            } else {
                if (date) {
                    // Asynchronously check crowding for every routine and
                    // splice out the crowded ones before calling back.
                    // FIX: counters/helpers were implicit globals, which
                    // races across concurrent requests; now locals.
                    var done = routines.length
                    var removed = []
                    var not_crowded = function(loc, id, cb) {
                        corec.get_usage("locations", loc, (data) => {
                            var capacity = data.Capacity
                            corec.get_usage("weeklytrends", loc, (data) => {
                                var time = new Date(date)
                                data.forEach((item) => {
                                    if (time.getDay() == item.DayOfWeek && time.getHours() == item.Hour) {
                                        cb(id, item.Headcount > capacity * 0.6)
                                        return
                                    }
                                })
                            })
                        })
                    }
                    for (var i = 0; i < routines.length; i++) {
                        not_crowded(tag_to_loc[routines[i].tags[0]], i, (id, crowded) => {
                            if (crowded) {
                                // Shift the index down for elements that
                                // were already spliced out before this one.
                                removed.forEach((rem) => {
                                    if (rem < id) { id -= 1 }
                                })
                                routines.splice(id, 1)
                                // BUG FIX: was removed.append(id) — JS
                                // arrays have no append(), so every crowded
                                // routine threw a TypeError here.
                                removed.push(id)
                            }
                            done--
                            if (done <= 0) {
                                cb(routines)
                            }
                        })
                    }
                } else {
                    cb(routines)
                }
            }
        });
    }
    if (filters) {
        if ('tags' in filters) {
            query['tags'] = { $in: filters['tags'] }
        }
        if ('rec' in filters && filters['rec']) {
            waiting = true
            User.findOne({ 'fb_id': user_id})
                .populate({ path: 'workouts', populate: { path: 'routine' } })
                .exec((err, user) => {
                    // FIX: also bail out when the user doc is missing.
                    if (err || !user) {
                        return
                    }
                    // Find the three most recent workouts and collect the
                    // tags of their routines.
                    var compare = []
                    var rec1 = {completed_on: 0}
                    var rec2 = {completed_on: 0}
                    var rec3 = {completed_on: 0}
                    user.workouts.forEach((wo) => {
                        if (wo.completed_on > rec1.completed_on) {
                            // BUG FIX: the previous 2nd place was dropped
                            // instead of shifting down to 3rd.
                            rec3 = rec2
                            rec2 = rec1
                            rec1 = wo
                        } else if (wo.completed_on > rec2.completed_on) {
                            rec3 = rec2
                            rec2 = wo
                        } else if (wo.completed_on > rec3.completed_on) {
                            rec3 = wo
                        }
                    })
                    var collect = (wo) => {
                        if ('routine' in wo && wo.routine) {
                            wo.routine.tags.forEach((tag) => {
                                compare.push(tag)
                            })
                        }
                    }
                    collect(rec1)
                    collect(rec2)
                    collect(rec3)
                    query['tags'] = { $in: compare }
                    var date = null
                    if (filters['date']) {
                        date = Date.parse(filters['date'])
                    }
                    finish(query, date)
                });
        }
    }
    if (!waiting) { finish(query, null) }
}
corec.get_reccomended_time = function(exercise, calender) {
    // Stub: intended to suggest a time for `exercise` given the user's
    // calendar. Not implemented yet. (The misspelled name is part of the
    // public interface, so it is left unchanged.)
}
corec.get_location_usage = function(location, cb) {
    // Fetch the stored History entries for one location GUID.
    // cb receives the documents, or false on a query error.
    // BUG FIX: `cb` was referenced in the body but never declared as a
    // parameter, so any call threw a ReferenceError; it is now an explicit
    // (backward-compatibly appended) argument.
    History.find({'LocationID':location}, (err, hist) => {
        if (err) {
            cb(false)
            return
        }
        cb(hist)
    });
}
module.exports = corec<file_sep>/models/history.js
var mongoose = require('mongoose');
// History: a stored headcount snapshot for one CoRec location.
// NOTE(review): the field casing `HeadCount` differs from the usage API's
// `Headcount` — confirm that writers map the field correctly.
var HistorySchema = new mongoose.Schema({
    LocationID: String,    // CoRec location GUID
    LocationName: String,
    HeadCount: Number,     // people counted at EntryDate
    EntryDate: { type: Date },
    DayOfWeek: Number,
    DayName: String
});
module.exports = mongoose.model('History', HistorySchema);
var mongoose = require('mongoose');
// Routine: a named, tagged list of exercises created by a user.
var RoutineSchema = new mongoose.Schema({
    creator: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },            // owning user
    exercises: [ { type: mongoose.Schema.Types.ObjectId, ref: 'Exercise'} ],   // ordered exercise refs
    tags: [ String ],  // muscle-group/category tags (see tag_to_loc in api/corec.js)
    name: String
});
module.exports = mongoose.model('Routine', RoutineSchema);
// Load required packages
var express = require('express');
var passport = require('passport');
var mongoose = require('mongoose');
var bodyParser = require('body-parser');

// Config
var conf = require('./config.js');

// MONGO — use native promises and connect with the configured URL.
mongoose.Promise = global.Promise;
mongoose.connect(conf.DB_URL);
// mongoose.connect('mongodb://localhost:27017/fitness_buddy');

// Create the app
var app = express();

// App configuration: JSON + url-encoded bodies, static assets from /html,
// passport initialization, and the versioned API router under /api/v1.
// Middleware order matters here (body parsing must precede the router).
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({
    extended: true
}));
app.use(express.static(__dirname + '/html'));
app.use(passport.initialize())
app.use('/api/v1', require('./api'));

// Get the port (environment override for deployment; defaults to 8081)
var port = process.env.PORT || 8081;

// Start the server
var server = app.listen(port, () => {
    console.log(`Server listening on port: ${port}`);
});

// Expose the http.Server instance to require()ers.
exports.server = server
|
355d8e02e1d739b5b92a95e32ba1f611ea99d238
|
[
"JavaScript"
] | 9 |
JavaScript
|
aloeb/fitness_buddy
|
a21ad85eea8028a316a0c5ebdcaa38f2581a4e11
|
f6d43e28deb3e1bfa37bac4c64bc683521b2dfe6
|
refs/heads/master
|
<repo_name>oclaudio/cubic-oscillator<file_sep>/README.md
# cubic-oscillator
Deep and classical learning of a damped cubic oscillator.
The deep learning TensorFlow implementation relies heavily on the code provided by <NAME>, <NAME> and <NAME> in "Deep learning of dynamics and signal-noise decomposition with time-stepping constraints" (see https://arxiv.org/abs/1808.02578); additional explanatory comments have been added.
In the deep learning Keras version, a port from the TensorFlow framework to Keras was attempted.
Additionally, a parameter estimation procedure via Kalman filtering has been implemented for comparison.
<file_sep>/utils.py
#necessary libraries
import numpy as np
import tensorflow as tf
from numpy.fft import fft, ifft, fftfreq
#Explicit Runge-Kutta time integrator. Assumes no time dependence in f
def RK_timestepper(x,t,f,h,weights,biases,direction='F',method = 'RK4'):
#INPUTS:
#x: vector or matrix of network input states; usually given as a placeholder
#t: vector of time; since no dependency on time, could be dropped
#f: the constructed neural network
#h: the (constant) time step
#weights: the weights of the network
#biases: the biases of the network
#direction: either 'F' or 'B': forward/backward direction of the Runge-Kutta scheme, defaults to forward
#method: used method; defaults to RK4
#several possible Runge-Kutta methods
if method == 'RK4_38':
b = [1/8,3/8,3/8,1/8]
A = [[],[1/3],[-1/3, 1],[1,-1,1]]
elif method == 'Euler':
b = [1]
A = [[]]
elif method == 'Midpoint':
b = [0,1]
A = [[],[1/2]]
elif method == 'Heun':
b = [1/2,1/2]
A = [[],[1]]
elif method == 'Ralston':
b = [1/4,3/4]
A = [[],[2/3]]
elif method == 'RK3':
b = [1/6,2/3,1/6]
A = [[],[1/2],[-1,2]]
#defaults to RK4 method
else:
b = [1/6,1/3,1/3,1/6]
A = [[],[1/2],[0, 1/2],[0,0,1]]
#number of steps for the Runge-Kutta scheme; corresponds to p
steps = len(b)
#forward direction
if direction == 'F':
#initiate K as list, first element being the network evaluated on x, weights, biases
K = [f(x, weights, biases)]
for i in range(1,steps):
#add other summands to the list
K.append(f(tf.add_n([x]+[h*A[i][j]*K[j] for j in range(i) if A[i][j] != 0]), weights, biases))
#backward direction, changes only the sign before f
else:
K = [-f(x, weights, biases)]
for i in range(1,steps):
K.append(-f(tf.add_n([x]+[h*A[i][j]*K[j] for j in range(i) if A[i][j] != 0]), weights, biases))
#OUTPUT:
#Runge-Kutta scheme for the given inputs
return tf.add_n([x]+[h*b[j]*K[j] for j in range(steps)])
#applies RK4-method using RK_timestepper in forward direction
def RK4_forward(x,t,f,h,weights,biases):
#INPUTS:
#as in RK_timestepper
#OUTPUTS:
#forward timestepper scheme using RK4 method
return RK_timestepper(x,t,f,h,weights,biases,direction='F',method = 'RK4_classic')
#applies RK4-method using RK_timestepper in backward direction
def RK4_backward(x,t,f,h,weights,biases):
#INPUTS:
#same as for RK_timestepper
#OUTPUTS:
#backward timestepper scheme using RK4 method
return RK_timestepper(x,t,f,h,weights,biases,direction='B',method = 'RK4_classic')
#calculates the activation of the next layer given weights and biases of previous layer
def dense_layer(x, W, b, last = False):
#INPUTS:
#x: Unit of the previous layer
#W: Weights of the current layer
#b: Biases of the current layer
#last: True if on last layer, false otherwise
#x -> W*x
x = tf.matmul(W,x)
#x -> x+b
x = tf.add(x,b)
#OUTPUTS:
#if last: output the activation of output layer using identity activation function
#otherwise: output the activation of the current layer using ELu activation function
if last: return x
else: return tf.nn.elu(x)
#construction of the network
def simple_net(x, weights, biases):
#INPUTS:
#x: vector or matrix of network input states; usually given as a placeholder
#weights: weights of the network
#biases: biases of the network
#initialize input layer as list
layers = [x]
#loop excludes last layer of the NN
for l in range(len(weights)-1):
#add hidden layers to the list, using dense_layer above
layers.append(dense_layer(layers[l], weights[l], biases[l]))
#output layer, linear activation function
out = dense_layer(layers[-1], weights[-1], biases[-1], last = True)
#OUTPUTS:
#returns activation of output layer
return out
#initial noise approxmation
def approximate_noise(Y, lam = 10):
#INPUTS:
#Y: data given
#lam: parameter for control of the linear matrix equation
#get shape of given data
n,m = Y.shape
#initialize D as m x m matrix
D = np.zeros((m,m))
#Set elements of D
D[0,:4] = [2,-5,4,-1]
D[m-1,m-4:] = [-1,4,-5,2]
for i in range(1,m-1):
D[i,i] = -2
D[i,i+1] = 1
D[i,i-1] = 1
#D^2
D = D.dot(D)
#solve (I + lam*D^T*D)*x = Y[j,:], reshape to correct dimensions
X_smooth = np.vstack([np.linalg.solve(np.eye(m) + lam*D.T.dot(D), Y[j,:].reshape(m,1)).reshape(1,m) for j in range(n)])
#estimated noise
N_hat = Y-X_smooth
#OUTPUTS:
#N_hat: initial noise approximation
#X_smooth: initial state approximation
return N_hat, X_smooth
#initializes the network weights, biases and noise as TensorFlow variables
def get_network_variables(n, n_hidden, size_hidden, N_hat):
#INPUT:
#n: dimension of the measurements
#n_hidden: number of hidden layers
#size_hidden: size of every hidden layer
#N_hat: initial noise approximation
#compute layer sizes, n nodes in input and output layer, size_hidden nodes in n_hidden layers
layer_sizes = [n] + [size_hidden for _ in range(n_hidden)] + [n]
#compute number of layers
num_layers = len(layer_sizes)
#initialization of network weights and biases
weights = []
biases = []
for j in range(1,num_layers):
#add weights of layer j to weights list using Xavier initializer
weights.append(tf.get_variable("W"+str(j), [layer_sizes[j],layer_sizes[j-1]], \
initializer = tf.contrib.layers.xavier_initializer(seed = 1)))
#add biases of layer j to biases list using zeros initializer
biases.append(tf.get_variable("b"+str(j), [layer_sizes[j],1], initializer = tf.zeros_initializer()))
#create TensorFlow variable N with initializer N_hat as float32
#in particular, this turns N into a trainable parameter
N = tf.get_variable("N", initializer = tf.cast(N_hat, dtype = tf.float32))
#OUTPUTS:
#returns the initialized weights, biases and noise as TensorFlow variables
#all returned variables are trainable
return (weights, biases, N)
#defines the loss function
def create_computational_graph(n, N_hat, net_params,num_dt = 10, method = 'RK4', gamma = 1e-5, beta = 1e-8, weight_decay = 'exp', decay_const = 0.9):
#INPUTS:
#n: dimension of the measurements
#N_hat: initial noise approximation
#net_params: network parameters, given as (weights, biases, N)
#num_dt: denotes q
#method: the method for the Runge-Kutta scheme in RK_timestepper
#gamma: hyperparameter of noise regularizer term, defaults to 1e-5
#beta: hyperparameter of weights regularizer term, defaults to 1e-8
#weights_decay: either 'linear' or 'exp', weights for the loss function, defaults to 'exp'
#decay_const: hyperparameter \omega_0 for exponential decay for weights of the loss function, defaults to 0.9
#n should be equal to first dimension of N_hat
assert(n == N_hat.shape[0])
#set m to second dimension of N_hat
m = N_hat.shape[1]
# Placeholders for initial condition
#set up placeholders for y_{j+i}, i=0 and j=q+1,...,m-q; noise measurements
Y_0 = tf.placeholder(tf.float32, [n,None], name = "Y_0")
#corresponding time
T_0 = tf.placeholder(tf.float32, [1,None], name = "T_0")
# Placeholders for true forward and backward predictions
true_forward_Y = []
true_backward_Y = []
for j in range(num_dt):
#add placeholders for rest of the noisy measurements y_{j+i}, i in [-q,q]\{0}
true_forward_Y.append(tf.placeholder(tf.float32, [n,None], name = "Y"+str(j+1)+"_true"))
true_backward_Y.append(tf.placeholder(tf.float32, [n,None], name = "Yn"+str(j+1)+"_true"))
#placeholder for timestep h
h = tf.placeholder(tf.float32, [1,1], name = "h")
# Forward and backward predictions of true state
#get the network parameters
(weights, biases, N) = net_params
#compute y_j - noise for j in [q+1, m-q]
X_0 = tf.subtract(Y_0, tf.slice(N, [0,num_dt],[n,m-2*num_dt]))
#apply the Runge-Kutta scheme, once for both directions, for the given network and X_0
pred_forward_X = [RK_timestepper(X_0, T_0, simple_net, h, weights, biases, method = method)]
pred_backward_X = [RK_timestepper(X_0, T_0, simple_net, h, weights, biases, method = method, direction = 'B')]
#apply the Runge-Kutta scheme for both directions up to a total number of q times, save all in a list
for j in range(1,num_dt):
pred_forward_X.append(RK_timestepper(pred_forward_X[-1], T_0, simple_net, h, weights, biases, method = method))
pred_backward_X.append(RK_timestepper(pred_backward_X[-1], T_0, simple_net, h, weights, biases,\
method = method, direction = 'B'))
# Forward and backward predictions of measured (noisy) state
#add estimated noise at time j+i to the results of the Runge-Kutta scheme above
pred_forward_Y = [pred_forward_X[j] + tf.slice(N, [0,num_dt+1+j],[n,m-2*num_dt]) for j in range(num_dt)]
pred_backward_Y = [pred_backward_X[j] + tf.slice(N, [0,num_dt-1-j],[n,m-2*num_dt]) for j in range(num_dt)]
# Set up cost function
#defaults to exponential decay
if weight_decay == 'linear': output_weights = [(1+j)**-1 for j in range(num_dt)]
else: output_weights = [decay_const**j for j in range(num_dt)]
forward_fidelity = tf.reduce_sum([w*tf.losses.mean_squared_error(true,pred) \
for (w,true,pred) in zip(output_weights,true_forward_Y,pred_forward_Y)])
backward_fidelity = tf.reduce_sum([w*tf.losses.mean_squared_error(true,pred) \
for (w,true,pred) in zip(output_weights,true_backward_Y,pred_backward_Y)])
fidelity = tf.add(forward_fidelity, backward_fidelity)
# Regularizer for NN weights
weights_regularizer = tf.reduce_mean([tf.nn.l2_loss(W) for W in weights])
# Regularizer for explicit noise term
noise_regularizer = tf.nn.l2_loss(N)
# Weighted sum of individual cost functions
cost = tf.reduce_sum(fidelity + beta*weights_regularizer + gamma*noise_regularizer)
# BFGS optimizer via scipy
optimizer = tf.contrib.opt.ScipyOptimizerInterface(cost, options={'maxiter': 50000,
'maxfun': 50000,
'ftol': 1e-15,
'gtol' : 1e-11,
'eps' : 1e-12,
'maxls' : 100})
#placeholders used above
placeholders = {'Y_0': Y_0,
'T_0': T_0,
'true_forward_Y': true_forward_Y,
'true_backward_Y': true_backward_Y,
'h': h}
return optimizer, placeholders
<file_sep>/utils_keras.py
#utils package
import numpy as np
import tensorflow as tf
from numpy.fft import fft, ifft, fftfreq
from tensorflow.keras import layers
import tensorflow.keras.backend as K
#provides an initial approximation of the noise
def approximate_noise(Y, lam = 10):
#INPUTS:
#Y: data given
#lam: parameter for control of the linear matrix equation
#get shape of given data
m,n = Y.shape
#initialize D as m x m "matrix"
D = np.zeros((m,m))
#Set elements of D
D[0,:4] = [2,-5,4,-1]
D[m-1,m-4:] = [-1,4,-5,2]
for i in range(1,m-1):
D[i,i] = -2
D[i,i+1] = 1
D[i,i-1] = 1
#D^2
D = D.dot(D)
#solve (I + lam*D^T*D)*x = Y[j,:], reshape to correct dimensions
X_smooth = np.vstack([np.linalg.solve(np.eye(m) + lam*D.T.dot(D), Y[:,j].reshape(m,1)).reshape(1,m) for j in range(n)]).astype('float32').T
#estimated noise
N_hat = Y-X_smooth
#OUTPUTS:
#N_hat: initial noise approximation
#X_smooth: initial state approximation
return N_hat, X_smooth
#Explicit Runge-Kutta time integrator. Assumes no time dependence in f
def RK_timestepper(x,f,h,direction='F'):
#INPUTS:
#x: vector or matrix of network input states; usually given as a placeholder
#f: the constructed neural network
#h: the (constant) time step
#direction: either 'F' or 'B': forward/backward direction of the Runge-Kutta scheme, defaults to forward
#RK4-method
b = [1/6,1/3,1/3,1/6]
A = [[],[1/2],[0, 1/2],[0,0,1]]
#number of steps for the Runge-Kutta scheme; corresponds to p
steps = len(b)
#forward direction
if direction == 'F':
#initiate k as list, first element being the network evaluated on x
k = [f(x)]
for i in range(1,steps):
#add other summands to the list
k.append(f(tf.add_n([x]+[h*A[i][j]*k[j] for j in range(i) if A[i][j] != 0])))
#backward direction, changes only the sign before f
else:
k = [-f(x)]
for i in range(1,steps):
k.append(-f(tf.add_n([x]+[h*A[i][j]*k[j] for j in range(i) if A[i][j] != 0])))
#OUTPUT:
#Runge-Kutta scheme for the given inputs
return tf.add_n([x]+[h*b[j]*k[j] for j in range(steps)])
##Explicit Runge-Kutta time integrator for Keras without use of TensorFlow for improved computation time.
def RK_timestepper_keras(t,f,h,direction='F',method = 'RK4'):
#INPUTS:
#x: vector or matrix of network input states; usually given as a placeholder
#f: the constructed neural network
#h: the (constant) time step
#direction: either 'F' or 'B': forward/backward direction of the Runge-Kutta scheme, defaults to forward
#RK4-method
b = [1/6,1/3,1/3,1/6]
A = [[],[1/2],[0, 1/2],[0,0,1]]
#initialize k as a list
k = [f(t, steps=1)]
#add other summands to the list
for i in range(1,len(b)):
k.append(f(np.add([t],[h*A[i][j]*k[j] for j in range(i) if A[i][j] != 0]).reshape((1,2)), steps=1))
#save sum in a list
summ = [[0,0]]
#calculate sum
for i in range(len(b)):
summ = np.add([summ],[h*b[i]*k[i]]).flatten()
#OUTPUT:
#Runge-Kutta scheme for the given inputs
return np.add([[t]], summ)
#class for calculation of the loss
class loss_class(object):
def __init__(self, Y, num_dt, dt, model, gamma, beta, decay_const, N_init,N=None, scope='N', **N_kwargs):
#INPUTS:
#Y: given measurements
#num_dt: denotes q
#dt: time-step between each measurement
#model: Keras-model of the network
#gamma: hyperparameter of noise regularizer term, defaults to 1e-5
#beta: hyperparameter of weights regularizer term, defaults to 1e-8
#decay_const: hyperparameter \omega_0 for exponential decay for weights of the loss function, defaults to 0.9
#N_init: initial noise approximation
#N: noise parameter
#scope: noise
#N_kwargs: additional arguments; not used
#define self
self.scope = scope
self.num_dt = num_dt
self.dt = dt
self.model = model
self.Y = Y
self.decay_const = decay_const
self.gamma = gamma
self.beta = beta
#initialize N as K variable
with tf.name_scope(self.scope):
if N is None:
N = K.variable(N_init, dtype=tf.float32,
name='N', **N_kwargs)
self.N_variable = N
self.N = N
#loss function
def loss(self, y_true, y_pred):
#INPUTS:
#y_true, y_pred: necessary for use of loss function in Keras, not actually used here
#get necessary variables
Y = self.Y
m,n = Y.shape
model = self.model
dt = self.dt
num_dt = self.num_dt
gamma = self.gamma
beta = self.beta
decay_const = self.decay_const
#compute loss
with tf.name_scope(self.scope):
#noisy measurements for y_{j+i}, i=0 and j=q+1,...,m-q
Y_0 = Y[num_dt:m-num_dt,:]
#noise
N = self.N_variable
#lists for forward and backward Y
true_forward_Y = []
true_backward_Y = []
#fill forward and backward lists with correct measurements
for j in range(num_dt):
true_forward_Y.append(Y[num_dt+j+1:m-num_dt+j+1,:])
true_backward_Y.append(Y[num_dt-j-1:m-num_dt-j-1,:])
#compute y_j - noise for j in [q+1, m-q]
X_0 = tf.subtract(Y_0, tf.slice(N, [num_dt,0],[m-2*num_dt,n]))
#apply the Runge-Kutta scheme, once for both directions, for the given network and X_0
pred_forward_X = [RK_timestepper(X_0, model.__call__, dt)]
pred_backward_X = [RK_timestepper(X_0, model.__call__, dt,direction = 'B')]
#apply the Runge-Kutta scheme for both directions up to a total number of q times, save all in a list
for j in range(1,num_dt):
pred_forward_X.append(RK_timestepper(pred_forward_X[-1], model.__call__, dt))
pred_backward_X.append(RK_timestepper(pred_backward_X[-1], model.__call__, dt, direction = 'B'))
# Forward and backward predictions of measured (noisy) state
#add estimated noise at time j+i to the results of the Runge-Kutta scheme above
pred_forward_Y = [pred_forward_X[j] + tf.slice(N, [num_dt+1+j,0],[m-2*num_dt,n]) for j in range(num_dt)]
pred_backward_Y = [pred_backward_X[j] +tf.slice(N, [num_dt-1-j,0],[m-2*num_dt,n]) for j in range(num_dt)]
# Set up cost function
#exponentially decreasing importance
output_weights = [decay_const**j for j in range(num_dt)]
forward_fidelity = tf.reduce_sum([w*tf.losses.mean_squared_error(true,pred) \
for (w,true,pred) in zip(output_weights,true_forward_Y,pred_forward_Y)])
backward_fidelity = tf.reduce_sum([w*tf.losses.mean_squared_error(true,pred) \
for (w,true,pred) in zip(output_weights,true_backward_Y,pred_backward_Y)])
fidelity = tf.add(forward_fidelity, backward_fidelity)
#get weights of the network
weights = []
for layers in model.layers[1:]:
weights.append(layers.get_weights()[0])
#calculate weights and noise regularizer term
weights_regularizer = tf.reduce_mean([tf.nn.l2_loss(W) for W in weights])
noise_regularizer = tf.nn.l2_loss(N)
#calculate loss function
cost = tf.reduce_sum(fidelity + gamma*noise_regularizer+ beta*weights_regularizer)
return cost
|
b11da32b2d7c2eaa25e42c40911508a82c9ddfbc
|
[
"Markdown",
"Python"
] | 3 |
Markdown
|
oclaudio/cubic-oscillator
|
eb0509700030764345731f5f5b7ed4528c4be014
|
58aa7ea8688527fccac750a9aa19214cd83dbb37
|
refs/heads/master
|
<repo_name>frontBOI/Transversal<file_sep>/README.md
<img src="https://picsum.photos/id/1040/4496/3000"
alt="Super image de qualité"
style="width: 200px;" />
# Projet transversal
Projet transversal à la super école CPE Lyon
## L'équipe, hommes d'envergure
| Nom | Puissance |
|---------------|:----------:|
|<NAME> |Ancestrale |
|<NAME> |A déterminer|
|<NAME> |Pain au lait|
|<NAME> |A déterminer|
> CPE Lyon, une école d'excellence (personne n'a dit ça)
## Le projet
Ce projet consiste à *simuler des incendies* sur une ville, et leur prise en charge par les flottes d’urgence qui vont intervenir.
## Les technologies
Afin de mener à bien ce projet, voici les différentes technologies imposées par l'équipe pédagogique:
- **GNS 3** (réseau)
- La triade sacrée *HTML/CSS/Javascript* (Web)
- **C** (IoT)
- **Java** (pour la partie Java tu coco)
- **Python & Flask** (côté serveur)
<file_sep>/setup.py
# permet la détection de la technologie Python
setup(
name='Projet transversal',
version='1.0',
long_description=__doc__,
packages=['Transversal'],
include_package_data=True,
zip_safe=False,
install_requires=['Flask']
)
|
83cae82e8ba6cefe585bf8c1bc05b4b842b92f05
|
[
"Markdown",
"Python"
] | 2 |
Markdown
|
frontBOI/Transversal
|
acf9ac442d159c8bbcc75d6a87adf0e403c1f268
|
435793353da764e57b9319cac45df3b355a5316c
|
refs/heads/master
|
<file_sep>#-*- coding: utf-8 -*-
import wx
import sys
import wx.grid
cantVar = 0
cantRes = 0
opcion = ""
Filas = 0
Columnas = 0
resultado = []
class enConstruccion(wx.Frame):
def __init__ (self, *args, **kwargs):
super(enConstruccion, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.SetTitle('Resultado')
self.SetSize((500, 600))
self.panel = wx.Panel(self)
self.Utilidad=wx.StaticText(self.panel,-1,"Ventana En Construccion",(190,10))
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
self.panel.SetSizer(self.sizer)
self.bt=wx.Button(self.panel,-1,"Continuar", pos = (190, 500))
self.Bind(wx.EVT_BUTTON, self.onContinuar, self.bt)
self.Centre()
self.Show(True)
def onContinuar(self, event):
ventanaInicio(None)
self.Destroy()
class ventanaFinal(wx.Frame):
def __init__ (self, *args, **kwargs):
super(ventanaFinal, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.SetTitle('Resultado')
self.SetSize((500, 600))
self.panel = wx.Panel(self)
self.Utilidad=wx.StaticText(self.panel,-1,"Utilidad = "+str(resultado[0]),(190,10))
for i in range(cantVar):
self.st1=wx.StaticText(self.panel,-1,"x"+str(i)+" = " + str(resultado[i+1]),(190 ,60*(i+1)))
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
self.panel.SetSizer(self.sizer)
self.bt=wx.Button(self.panel,-1,"Continuar", pos = (190, 500))
self.Bind(wx.EVT_BUTTON, self.onContinuar, self.bt)
self.Centre()
self.Show(True)
def onContinuar(self, event):
ventanaInicio(None)
self.Destroy()
class DosFases():
def __init__(self, matriz):
self.matriz = matriz
self.Iniciar()
def Iniciar(self):
global Filas
global Columnas
self.tabla = []
for i in range(cantRes+1):
self.linea = []
for j in range(cantVar):
self.linea.append(float(self.matriz[i][j]))
self.tabla.append(self.linea)
for i in range(cantRes+1):
if(self.matriz[i][cantVar+1] == "<=" or self.matriz[i][cantVar+1] == "="):
self.tabla[i].append(1)
for k in range(cantRes+1):
if(i != k):
self.tabla[k].append(0)
if(self.matriz[i][cantVar+1] == ">="):
self.tabla[i].append(1)
self.tabla[i].append(-1.0)
for k in range(cantRes+1):
if(i != k):
self.tabla[k].append(0)
self.tabla[k].append(0)
for i in range(cantRes+1):
self.tabla[i].append(float(self.matriz[i][cantVar]))
Filas = len(self.tabla)
Columnas = len(self.tabla[0])
def PrimeraFase(self):
self.tablaPrimera = []
self.linea = []
self.arti = []
self.artj = []
for i in range(cantVar):
self.linea.append(0)
self.tablaPrimera.append(self.linea)
for i in range(cantRes+1):
if(self.matriz[i][cantVar+1] == "<="):
self.tablaPrimera[0].append(0)
if(self.matriz[i][cantVar+1] == ">="):
self.tablaPrimera[0].append(-1.0)
self.tablaPrimera[0].append(0)
self.arti.append(i)
if(self.matriz[i][cantVar+1] == "="):
self.tablaPrimera[0].append(-1.0)
self.arti.append(i)
self.tablaPrimera[0].append(0)
for i in range(1, Filas):
self.linea = []
for j in range(Columnas):
self.linea.append(self.tabla[i][j])
self.tablaPrimera.append(self.linea)
for j in range(Columnas):
if(self.tablaPrimera[0][j] == -1.0):
self.f1 = self.tablaPrimera[0]
for i in range(Filas):
self.linea = []
if(self.tablaPrimera[i][j] == 1):
self.artj.append(j)
self.f2 = self.tablaPrimera[i]
for k in range(Columnas):
num = float(self.f1[k]) + float(self.f2[k])
self.linea.append(num)
self.tablaPrimera[0] = self.linea
self.SimplexPrimeraParte()
def SimplexPrimeraParte(self):
self.posx = 0
self.posy = 0
self.infinito = 99999999999
while(len(self.arti) > 0):
self.linea = []
for j in range(Columnas-1):
self.linea.append(self.tablaPrimera[0][j])
self.maximo = max(self.linea)
self.posy = self.linea.index(self.maximo)
self.division = [self.infinito]
for i in range(1, Filas):
self.division.append(self.tablaPrimera[i][Columnas-1]/self.tablaPrimera[i][self.posy])
self.minimo = min(self.division)
self.posx = self.division.index(self.minimo)
self.newarti = []
for i in self.arti:
if(int(i) != int(self.posx)):
self.newarti.append(i)
self.arti = self.newarti
self.DividirPivotePrimeraParte()
def DividirPivotePrimeraParte(self):
self.puntopivote = self.tablaPrimera[self.posx][self.posy]
for j in range(Columnas):
self.tablaPrimera[self.posx][j] = self.tablaPrimera[self.posx][j]/self.puntopivote
for i in range(Filas):
self.multi = self.tablaPrimera[i][self.posy]
for j in range(Columnas):
if(i != self.posx):
self.tablaPrimera[i][j] = self.tablaPrimera[i][j] - self.multi*self.tablaPrimera[self.posx][j]
def SegundaFase(self):
global resultado
self.tablaSegunda = []
self.linea = []
for j in range(cantVar):
self.linea.append(float(self.matriz[0][j]))
for j in range(cantVar, Columnas):
if((j in self.artj)== False):
self.linea.append(round(self.tablaPrimera[0][j],1))
self.tablaSegunda.append(self.linea)
for i in range(1,Filas):
self.linea =[]
for j in range(Columnas):
if((j in self.artj)== False):
self.linea.append(round(self.tablaPrimera[i][j],1))
self.tablaSegunda.append(self.linea)
self.SimplexSegundaParte()
for i in range(Filas):
resultado.append(self.tablaSegunda[i][Columnas-len(self.artj)-1])
def SimplexSegundaParte(self):
self.posx = 0
self.posy = 0
while(self.HayNegativo()):
self.linea = []
for j in range(Columnas-len(self.artj)-1):
self.linea.append(self.tablaSegunda[0][j])
self.minimo = min(self.linea)
self.posy = self.linea.index(self.minimo)
for i in range(Filas):
if(self.tablaSegunda[i][self.posy] == 1.0):
self.posx = i
self.pivot = self.tablaSegunda[0][self.posy]*-1
for j in range(Columnas-len(self.artj)):
self.tablaSegunda[0][j] = self.tablaSegunda[0][j]+(self.pivot*self.tablaSegunda[self.posx][j])
while(self.HayPositivo()):
self.linea = []
for j in range(len(self.tablaSegunda[0])-1):
self.linea.append(self.tablaSegunda[0][j])
self.maximo = max(self.linea)
self.posy = self.linea.index(self.maximo)
self.division = [self.infinito]
for i in range(1, Filas):
self.result = self.tablaSegunda[i][Columnas-len(self.artj)-1]/self.tablaSegunda[i][self.posy]
if(self.result > 0):
self.division.append(self.result)
else:
self.division.append(self.infinito)
self.minimo = min(self.division)
self.posx = self.division.index(self.minimo)
self.Gauss()
def Gauss(self):
self.puntopivote = self.tablaSegunda[self.posx][self.posy]
for j in range(Columnas-len(self.artj)-1):
self.tablaSegunda[self.posx][j] = self.tablaSegunda[self.posx][j]/self.puntopivote
for i in range(Filas):
self.multi = self.tablaSegunda[i][self.posy]
for j in range(Columnas-len(self.artj)):
if(i != self.posx):
self.tablaSegunda[i][j] = self.tablaSegunda[i][j] - self.multi*self.tablaSegunda[self.posx][j]
def HayNegativo(self):
bandera = False
for j in range(len(self.tablaSegunda[0])-1):
if(self.tablaSegunda[0][j] < 0):
bandera = True
break
return bandera
def HayPositivo(self):
bandera = False
for j in range(len(self.tablaSegunda[0])-1):
if(self.tablaSegunda[0][j] > 0):
bandera = True
break
return bandera
def __str__(self):
cadena = "["
for i in range(cantRes+1):
for j in range(cantVar+2):
cadena += str(self.matriz[i][j]) + ", "
cadena += "]\n"
return cadena
class RestriccionesUI(wx.Frame):
def __init__ (self, *args, **kwargs):
super(RestriccionesUI, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.SetTitle('Variables')
self.SetSize((500, 600))
self.panel = wx.Panel(self)
self.tabla = wx.grid.Grid(self.panel)
self.tabla.CreateGrid(cantRes+1, cantVar+2)
self.tabla.SetCellValue(0, cantVar+1, 'objetivo')
self.tabla.SetCellValue(0, cantVar, '0')
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.tabla,1,wx.ALIGN_CENTRE|wx.ALL,35)
self.panel.SetSizer(self.sizer)
self.bt=wx.Button(self.panel,-1,"Continuar", pos = (220, 500))
self.Bind(wx.EVT_BUTTON, self.onContinuar, self.bt)
self.opcion = ['Max', 'Min']
self.text = wx.StaticText(self.panel, -1, "Objetivo", (220,400))
self.aviso= wx.StaticText(self.panel, -1, "Primero seleccione las restricciones en la ultima columna", (100,200))
self.edit = wx.ComboBox(self.panel, pos = (220, 450), choices = self.opcion,)
self.list = ['<=', '=', '>=']
self.choices=wx.grid.GridCellChoiceEditor(self.list, True)
for i in range(1, cantRes+1):
self.tabla.SetCellEditor(i, cantVar+1, self.choices)
self.Centre()
self.Show(True)
def onContinuar(self, event):
global opcion
opcion = self.edit.GetValue()
if(opcion == 'Min'):
for j in range(cantVar):
self.dato = float(self.tabla.GetCellValue(0, j))
self.dato *= -1
self.tabla.SetCellValue(0,j, str(self.dato))
self.tablero = []
for i in range(cantRes+1):
self.linea = []
for j in range(cantVar+2):
self.linea.append(self.tabla.GetCellValue(i, j))
self.tablero.append(self.linea)
matrix = DosFases(self.tablero)
matrix.PrimeraFase()
matrix.SegundaFase()
ventanaFinal(None)
self.Destroy()
class DosFasesUI(wx.Frame):
def __init__ (self, *args, **kwargs):
super(DosFasesUI, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.SetTitle('Metodo Dos Fases')
self.SetSize((400,400))
self.Titulo = wx.StaticText(self, -1, "Investigacion De Operaciones", (190,10))
self.p1=wx.Panel(self)
self.p2=wx.Panel(self)
self.mainsz = wx.BoxSizer(wx.VERTICAL)
self.p1sz = wx.BoxSizer(wx.VERTICAL)
self.p2sz = wx.BoxSizer(wx.VERTICAL)
self.st1=wx.StaticText(self.p1,-1,"Inserte El Numero De Variables",(0,10))
self.txt1=wx.TextCtrl(self.p1,-1)
self.st2=wx.StaticText(self.p2,-1,"Inserte El Numero De Restricciones")
self.txt2=wx.TextCtrl(self.p2,-1)
self.bt=wx.Button(self,-1,"Continuar")
self.p1sz.Add(self.st1,1,wx.EXPAND|wx.ALL,10)
self.p1sz.Add(self.txt1,1,wx.EXPAND|wx.ALL,10)
self.p2sz.Add(self.st2,1,wx.EXPAND|wx.ALL,10)
self.p2sz.Add(self.txt2,1,wx.EXPAND|wx.ALL,10)
self.mainsz.Add(self.p1,1,wx.EXPAND)
self.mainsz.Add(self.p2,1,wx.EXPAND)
self.mainsz.Add(self.bt,1,wx.ALIGN_CENTRE|wx.ALL,30)
self.Bind(wx.EVT_BUTTON, self.onContinuar, self.bt)
self.p1.SetSizer(self.p1sz)
self.p2.SetSizer(self.p2sz)
self.SetSizer(self.mainsz)
self.Centre()
self.Show(True)
def onContinuar(self, event):
global cantVar
global cantRes
cantVar = int(self.txt1.GetValue())
cantRes = int(self.txt2.GetValue())
RestriccionesUI(None)
self.Destroy()
class LagrangeUI(wx.Frame):
def __init__ (self, *args, **kwargs):
super(LagrangeUI, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.SetTitle('Algoritmo de LaGrange')
self.SetSize((400,400))
self.Titulo = wx.StaticText(self, -1, "Investigacion De Operaciones", (190,10))
self.p1=wx.Panel(self)
self.p2=wx.Panel(self)
self.mainsz = wx.BoxSizer(wx.VERTICAL)
self.p1sz = wx.BoxSizer(wx.VERTICAL)
self.p2sz = wx.BoxSizer(wx.VERTICAL)
self.st1=wx.StaticText(self.p1,-1,"Inserte El Numero De Variables",(0,10))
self.txt1=wx.TextCtrl(self.p1,-1)
self.st2=wx.StaticText(self.p2,-1,"Inserte El Numero De Restricciones")
self.txt2=wx.TextCtrl(self.p2,-1)
self.bt=wx.Button(self,-1,"Continuar")
self.p1sz.Add(self.st1,1,wx.EXPAND|wx.ALL,10)
self.p1sz.Add(self.txt1,1,wx.EXPAND|wx.ALL,10)
self.p2sz.Add(self.st2,1,wx.EXPAND|wx.ALL,10)
self.p2sz.Add(self.txt2,1,wx.EXPAND|wx.ALL,10)
self.mainsz.Add(self.p1,1,wx.EXPAND)
self.mainsz.Add(self.p2,1,wx.EXPAND)
self.mainsz.Add(self.bt,1,wx.ALIGN_CENTRE|wx.ALL,30)
self.Bind(wx.EVT_BUTTON, self.onContinuar, self.bt)
self.p1.SetSizer(self.p1sz)
self.p2.SetSizer(self.p2sz)
self.SetSizer(self.mainsz)
self.Centre()
self.Show(True)
def onContinuar(self, event):
enConstruccion(None)
self.Destroy()
class ventanaInicio(wx.Frame):
def __init__ (self, *args, **kwargs):
super(ventanaInicio, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.panel = wx.Panel(self)
self.SetTitle('Investigacion de Operaciones')
self.SetSize((600,600))
self.Titulo = wx.StaticText(self.panel, -1, "Investigacion De Operaciones", (190,10))
self.sz=wx.BoxSizer(wx.VERTICAL)
self.boton1 = wx.Button(self.panel,-1, u"Dos Fases")
self.boton2 = wx.Button(self.panel,-1, u"Lagrange")
self.boton3 = wx.Button(self.panel,-1, u"Salir")
self.sz.Add(self.boton1,1,wx.EXPAND|wx.ALL,40)
self.sz.Add(self.boton2,1,wx.EXPAND|wx.ALL,40)
self.sz.Add(self.boton3,1,wx.EXPAND|wx.ALL,60)
self.Bind(wx.EVT_BUTTON, self.onClickButton1, self.boton1)
self.Bind(wx.EVT_BUTTON, self.onClickButton2, self.boton2)
self.Bind(wx.EVT_BUTTON, self.onQuit, self.boton3)
self.SetSizer(self.sz)
self.Centre()
self.Show(True)
def onClickButton1(self,event):
DosFasesUI(None)
self.Destroy()
def onClickButton2(self,event):
LagrangeUI(None)
self.Destroy()
def onQuit(self,event):
self.Destroy()
if __name__ == '__main__':
app = wx.App()
ventanaInicio(None)
app.MainLoop()
|
18166f2093e71123960dffa15a4d17744170ed2f
|
[
"Python"
] | 1 |
Python
|
JjVera96/ProyectoIO
|
01d5d8d7005dcbd1c59b161cf0a380166aaa5843
|
6e74cf8ae28cbc9eec8b638be71ebb56ca42b6ad
|
refs/heads/master
|
<file_sep>import React, { useState, useEffect } from "react";
import {
StyleSheet,
Text,
View,
Picker,
TextInput,
AsyncStorage,
TouchableOpacity,
ImageBackground
} from "react-native";
import DebounceTouchbleOpacity from './helpers/DebounceTouchbleOpacity'
import { useDispatch, useSelector } from "react-redux";
import { locationsSelector, } from '../redux/reducers/locations';
import { sendFromTransit, packageSelector } from '../redux/reducers/packages'
const RedirectPackage = ({ navigation }) => {
const dispatch = useDispatch();
const item = useSelector(packageSelector)
const { list, error } = useSelector(locationsSelector);
const [selectLoc, setSelectLoc] = useState("");
const handleChange = loc => {
setSelectLoc(loc);
};
const resend = () => {
dispatch(sendFromTransit(selectLoc))
navigation.navigate("DriverDetails");
};
const cancel = () => {
navigation.navigate("PackageInfo");
};
return (
<View style={styles.container}>
<View style={styles.contentInfo}>
<View style={styles.info}>
<View style={styles.colorBlock}>
<Text style={styles.headTitle}>Конечный получатель:</Text>
<Text style={styles.text}>
{item.resiverId && item.resiverId.title}
</Text>
</View>
<Text style={styles.textCenter}>
Выберите нового получателя из списка
</Text>
{list &&
<View style={(styles.pickerBlock, styles.colorBlock)}>
<Picker
selectedValue={selectLoc}
style={(styles.picker, styles.textWihte)}
onValueChange={loc => handleChange(loc)}
>
{list.length &&
list.map(loc => (
<Picker.Item
label={loc.title}
value={loc.title}
key={loc._id}
/>
))}
</Picker>
</View>
}
{/* <Text style={styles.textCenter}>или введите вручную</Text>
<TextInput
style={
(styles.textInput,
{ ...styles.colorBlock, ...styles.textWihte })
}
onChangeText={loc => setSelectLoc(loc)}
value={selectLoc}
/> */}
</View>
</View>
<View style={styles.contentCenter}>
{error && (
<View>
<Text style={styles.err}>
Ошибка обновления, повторите попытку
</Text>
</View>
)}
<View style={styles.btnBlock}>
<View>
<DebounceTouchbleOpacity onPress={cancel} delay={1000}>
<View style={styles.btn}>
<Text style={styles.btnText}>Отменить</Text>
</View>
</DebounceTouchbleOpacity>
</View>
<View>
<DebounceTouchbleOpacity onPress={resend} delay={1000}>
<View style={styles.btn}>
<Text style={styles.btnText}>Отправить</Text>
</View>
</DebounceTouchbleOpacity>
</View>
</View>
</View>
</View>
);
};
// Style table for RedirectPackage.
// Fix: the key was misspelled `piker`, so `styles.picker` (referenced by the
// component) resolved to undefined — renamed to `picker`.
// NOTE(review): `textWihte` is also misspelled but is referenced under that
// name from the component, so it is kept for compatibility.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    alignItems: "center",
    justifyContent: "center",
    margin: 50
  },
  contentCenter: {
    display: "flex",
    alignItems: "center",
    justifyContent: "center"
  },
  contentInfo: {
    flex: 1,
    marginTop: 30
  },
  info: {
    flex: 1
  },
  btn: {
    backgroundColor: "#fa000c",
    marginHorizontal: 10,
    padding: 10
  },
  btnText: {
    color: "#fff",
    fontSize: 16
  },
  btnBlock: {
    display: "flex",
    justifyContent: "space-between",
    flexDirection: "row",
    alignItems: "center",
    marginTop: 10
  },
  title: {
    width: "70%",
    fontSize: 16
  },
  headTitle: {
    fontWeight: "bold",
    textAlign: "left",
    color: "#fff",
    fontSize: 16
  },
  err: {
    color: "red",
    textAlign: "center"
  },
  pickerBlock: {
    borderWidth: 1,
    borderColor: "#fff",
    borderRadius: 25,
    padding: 10,
    margin: 5
  },
  picker: {
    color: "#fff"
  },
  textInput: {
    padding: 15,
    margin: 5,
    color: "#fff",
    fontSize: 16
  },
  textCenter: {
    textAlign: "left",
    marginVertical: 10,
    color: "#000",
    fontWeight: "800"
  },
  colorBlock: {
    backgroundColor: "#fa000c",
    padding: 10
  },
  textWihte: {
    color: "#fff",
    fontWeight: "800",
    fontFamily: 'Roboto',
    fontSize: 17
  },
  text:{
    color: "#000",
    fontWeight: "800",
    fontFamily: 'Roboto',
    fontSize: 17
  }
});
export default RedirectPackage;
<file_sep>import React from 'react';
import { TouchableOpacity, View } from 'react-native';
const DebounceTouchableOpacity = ({
children, onPress, delay = 1000, ...props
}) => {
const debounce = () => {
let isCooldown = false;
return function () {
if (isCooldown) return;
onPress.apply(this, arguments);
isCooldown = true;
setTimeout(() => isCooldown = false, delay);
};
}
return <TouchableOpacity onPress={debounce()} {...props}>
{children}
</TouchableOpacity>
}
export default DebounceTouchableOpacity;<file_sep>import React, { useState, useEffect } from "react";
import { useDispatch } from "react-redux";
import { StyleSheet, Text, View, Button } from "react-native";
import * as Permissions from "expo-permissions";
import { getPackage } from '../redux/reducers/packages';
import { BarCodeScanner } from "expo-barcode-scanner";
import BarcodeMask from 'react-native-barcode-mask';
const BarcodeScanner = ({ navigation }) => {
const [hasCameraPermission, setHasCameraPermission] = useState(null);
const [scanned, setScanned] = useState(false);
const dispatch = useDispatch();
useEffect(() => {
getPermissionsAsync();
});
getPermissionsAsync = async () => {
try {
const { status } = await Permissions.askAsync(Permissions.CAMERA);
setHasCameraPermission(status === "granted");
}
catch (err) {
console.log(err)
}
};
handleBarCodeScanned = ({ type, data }) => {
setScanned(true);
dispatch(getPackage(data));
navigation.push('PackageInfo');
};
if (hasCameraPermission === null) {
return (
<View style={styles.container}>
<Text>Получение разрешений</Text>
<Text>на использование камеры </Text>
</View>
);
}
if (hasCameraPermission === false) {
return (
<View style={styles.container}>
<Text>No access to camera</Text>
</View>
);
}
return (
<View style={styles.container}>
<BarCodeScanner
onBarCodeScanned={scanned ? undefined : this.handleBarCodeScanned}
style={StyleSheet.absoluteFillObject}
/>
<BarcodeMask width={300} height={300} />
{scanned && (
<Button title={"Повторить сканирование"} onPress={() => setScanned(false)} />
)}
</View>
);
};
// Full-screen black container that centers the permission/status text.
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "center",
backgroundColor: '#000'
}
});
export default BarcodeScanner;
<file_sep>import { ToastAndroid } from 'react-native';
// Show a long-duration Android toast anchored to the bottom of the screen.
//
// Fix: the previous call passed a 4th argument (50) to showWithGravity,
// whose signature is (message, duration, gravity) — the extra value was
// silently ignored, so dropping it is behavior-identical. If a vertical
// offset is actually wanted, use ToastAndroid.showWithGravityAndOffset.
export default function (msg) {
  return ToastAndroid.showWithGravity(
    msg,
    ToastAndroid.LONG,
    ToastAndroid.BOTTOM,
  );
}
import {
StyleSheet,
Text,
TextInput,
View,
ImageBackground,
TouchableOpacity,
Button
} from "react-native";
import { Container } from 'native-base';
import { useDispatch, useSelector } from "react-redux";
import { packageSelector, updateDriverDetails, errorUpdateSelector } from '../redux/reducers/packages'
// Screen for entering/confirming the driver's full name and the vehicle
// registration number before a package is handed over for delivery.
// Fields are pre-filled from the currently selected package.
const DriverDetails = ({ navigation }) => {
const dispatch = useDispatch();
const item = useSelector(packageSelector);
const updateError = useSelector(errorUpdateSelector)
// NOTE(review): the state setters are misspelled ("serDrivername" /
// "serregNumber" instead of "set…"); local names only, behavior unaffected.
const [driverName, serDrivername] = useState(item.driverDetails.driverFullname || '')
const [regNumber, serregNumber] = useState(item.driverDetails.regNumber || '')
// Build the payload: keep the stored driver details unless the user edited
// either field, in which case both fields are taken from the inputs.
const update = () => {
let value = {
driverDetails: {
regNumber: item.driverDetails.regNumber,
driverFullname: item.driverDetails.driverFullname
}
};
if (regNumber !== item.driverDetails.regNumber || driverName !== item.driverDetails.driverFullname) {
value.driverDetails = {
driverFullname: driverName,
regNumber
}
}
// The saga handles navigation after a successful update.
dispatch(updateDriverDetails(value, navigation))
}
// Shown in place of the form when the last update failed.
const renderError = () => <View>
<Text style={styles.err}>
Ошибка обновления, повторите попытку
</Text>
</View>
// The two-field driver form.
const renderContent = () => <View>
<View style={styles.textBlock}>
<Text style={styles.text}>ФИО водителя</Text>
<TextInput
style={styles.textInput}
onChangeText={text => serDrivername(text)}
value={driverName}
/>
</View>
<View style={styles.textBlock}>
<Text style={styles.text}>Рег. номер автомобиля</Text>
<TextInput
style={styles.textInput}
onChangeText={text => serregNumber(text)}
value={regNumber}
/>
</View>
</View>
return (
<ImageBackground
source={require("../assets/bg4.png")}
style={{ width: "100%", height: "100%" }}
>
<View style={styles.container}>
{updateError ? renderError() : renderContent()}
<TouchableOpacity onPress={update}>
<View style={styles.btn}>
<Text style={styles.btnText}>Отправить</Text>
</View>
</TouchableOpacity>
</View >
</ImageBackground >
);
};
// Style table for DriverDetails (red/white brand palette).
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "space-between",
margin: 50
},
contentCenter: {
display: "flex",
alignItems: "center",
justifyContent: "center"
},
textBlock: {
textAlign: "center",
marginVertical: 10,
color: "#000",
fontWeight: "800",
fontSize: 24,
minWidth: 300
},
btn: {
marginTop: 20,
backgroundColor: "#fa000c",
marginHorizontal: 10,
padding: 10
},
btnText: {
color: "#fff",
fontSize: 16
},
err: {
color: "red",
textAlign: "center"
},
textInput: {
backgroundColor: "#fa000c",
padding: 15,
margin: 5,
color: "#fff",
fontSize: 16
},
colorBlock: {
backgroundColor: "#fa000c"
},
textWihte: {
color: "#fff",
padding: 10
}
});
export default DriverDetails;
<file_sep>import { all, fork } from 'redux-saga/effects';
import {saga as authSaga } from '../reducers/auth';
import {saga as packagesSaga } from '../reducers/packages';
import {saga as locationsSaga } from '../reducers/locations';
import {saga as qrCodeSaga } from '../reducers/qrCode';
import {saga as usersSaga } from '../reducers/users';
// Root saga: forks every feature watcher so they all run concurrently.
function* rootSaga() {
  const watchers = [authSaga, packagesSaga, locationsSaga, qrCodeSaga, usersSaga];
  yield all(watchers.map(watcher => fork(watcher)));
}
export default rootSaga;
<file_sep>import { all, fork, put, takeLatest } from 'redux-saga/effects';
import api from '../../../api';
const GET_USERS = '@@users/GET_USERS';
const GET_USERS_SUCCESS = '@@users/GET_USERS_SUCCESS';
const GET_USERS_FAIL = '@@users/GET_USERS_FAIL';
const ADD_USER = '@@users/ADD_USER';
const ADD_USER_SUCCESS = '@@users/ADD_USER_SUCCESS';
const ADD_USER_FAIL = '@@users/ADD_USER_FAIL';
const UPDATE_USER = '@@users/UPDATE_USER';
const UPDATE_USER_SUCCESS = '@@users/UPDATE_USER_SUCCESS';
const UPDATE_USER_FAIL = '@@users/UPDATE_USER_FAIL';
const DELETE_USER = '@@users/DELETE_USER';
const DELETE_USER_SUCCESS = '@@users/DELETE_USER_SUCCESS';
const DELETE_USER_FAIL = '@@users/DELETE_USER_FAIL';
// reducers
const initialState = {
loading: false,
error: false,
list: [],
count: 0
};
export const reducer = (state = initialState, action ) => {
const { type, payload } = action;
switch (type) {
case GET_USERS:
return state;
case GET_USERS_SUCCESS:
return { ...state, list: payload.list, count: payload.count };
case GET_USERS_FAIL:
return { ...state, ...payload };
default:
return state;
}
};
// selectors
export const getUserFromState = (state) => state.users;
// actions creators
export const getUsers = (token, skip, limit) =>
({ type: GET_USERS, payload: { token, skip, limit } });
export const getUsersSuccess = (users) =>
({ type: GET_USERS_SUCCESS, payload: users });
export const getUsersFail = (err) =>
({ type: GET_USERS_FAIL, payload: { error: err } });
// add
export const addUser = (token, newUser) =>
({ type: ADD_USER, payload: { token, newUser } });
export const addUserSuccess = (newUser) =>
({ type: ADD_USER_SUCCESS, payload: newUser });
export const addUserFail = (error) =>
({ type: ADD_USER_FAIL, payload: error });
// update
export const updateUser = (token, editedUser) =>
({ type: UPDATE_USER, payload: { token, editedUser } });
export const updateUserSuccess = (updatedUser) =>
({ type: UPDATE_USER_SUCCESS, payload: updatedUser });
export const updateUserFail = (error) =>
({ type: UPDATE_USER_FAIL, payload: error });
// delete
export const deleteUser = (token, id) =>
({ type: DELETE_USER, payload: { token, id } });
export const deleteUserSuccess = (deletedLocation) =>
({ type: DELETE_USER_SUCCESS, payload: deletedLocation });
export const deleteUserFail = (error) =>
({ type: DELETE_USER_FAIL, payload: error });
// sagas
// Entry point forked by the root saga: starts one watcher per CRUD flow.
export function* saga() {
yield all([
fork(watchGetUsersSaga),
fork(watchAddUsersaga),
fork(watchUpdateUsersaga),
fork(watchDeleteUsersaga)
]);
}
// Each watcher maps one action type to its worker; takeLatest cancels an
// in-flight request when the same action is dispatched again.
function* watchGetUsersSaga() {
yield takeLatest(GET_USERS, getUsersSaga);
}
function* watchAddUsersaga() {
yield takeLatest(ADD_USER, addUserSaga);
}
function* watchUpdateUsersaga() {
yield takeLatest(UPDATE_USER, updateUsersaga);
}
function* watchDeleteUsersaga() {
yield takeLatest(DELETE_USER, deleteUsersaga);
}
// Fetch a page of users.
// NOTE(review): the Authorization scheme string is "Baerer" (sic) across the
// whole codebase — the backend presumably matches this exact spelling, so do
// not "fix" it on the client alone.
function* getUsersSaga(action) {
try {
const { token, skip, limit } = action.payload;
api.defaults.headers.common.Authorization = `Baerer ${token}`;
const result = yield api.post('/users/', { skip, limit });
if (!result) {
throw new Error('error with get data');
}
yield put(getUsersSuccess(result.data));
} catch (error) {
yield put(getUsersFail(error));
}
}
// Create a user, then re-fetch the full list to refresh the store.
function* addUserSaga(action) {
try {
const { token, newUser } = action.payload;
api.defaults.headers.common.Authorization = `Baerer ${token}`;
const result = yield api.post('/users/create', newUser);
if (!result) {
throw new Error('error with get update');
}
yield put(addUserSuccess(result.data));
yield put(getUsers(token, 0, 1000));
} catch (error) {
yield put(addUserFail(error));
}
}
// Update a user, then re-fetch the full list.
function* updateUsersaga(action) {
try {
const { token, editedUser } = action.payload;
api.defaults.headers.common.Authorization = `Baerer ${token}`;
const result = yield api.post('/users/update/', editedUser);
if (!result) {
throw new Error('error with get data');
}
yield put(updateUserSuccess(result.data));
yield put(getUsers(token, 0, 1000));
} catch (error) {
yield put(updateUserFail(error));
}
}
// Delete a user by id, then re-fetch the full list.
function* deleteUsersaga(action) {
try {
const { token, id } = action.payload;
api.defaults.headers.common.Authorization = `Baerer ${token}`;
const result = yield api.post('/users/delete', { id });
if (!result) {
throw new Error('error with get data');
}
yield put(deleteUserSuccess(result.data));
yield put(getUsers(token, 0, 1000));
} catch (error) {
yield put(deleteUserFail(error));
}
}
<file_sep>import axios from 'react-native-axios';
// Shared axios instance for the backend REST API (v2).
// NOTE(review): the base URL is a hard-coded LAN address — presumably a
// development server; move it to configuration before release.
const api = axios.create({
baseURL: 'http://172.16.58.3:3000/api/v2/',
headers: {
'Content-Type': 'application/json;charset=utf-8'
}
});
export default api;
<file_sep>import React, { useState } from "react";
import { useDispatch, useSelector } from "react-redux";
import {
StyleSheet,
Text,
View,
Image,
AsyncStorage,
ActivityIndicator,
ImageBackground,
ScrollView,
FlatList
} from "react-native";
import { Ionicons } from "@expo/vector-icons";
import moment from "moment";
import DebounceTouchbleOpacity from "./helpers/DebounceTouchbleOpacity";
import { authSelector } from "../redux/reducers/auth";
import {
packageSelector,
errorSelector,
loadingSelector,
changeUpdateItem,
setUpdateItem
} from "../redux/reducers/packages";
import { Container, Content } from "native-base";
// Overview screen for a scanned package: shows receiver, note, inventory,
// transit history and driver details, plus context-dependent action buttons
// (send / forward / accept) based on the package status and whether the
// current user's warehouse is the final destination.
const PackageInfo = ({ navigation }) => {
// Collapsible-section toggles for the inventory and transit lists.
const [isOpenItems, setIsOpenItems] = useState(false);
const [isOpenTransit, setIsOpenTransit] = useState(false);
const dispatch = useDispatch();
const item = useSelector(packageSelector);
const auth = useSelector(authSelector);
const error = useSelector(errorSelector);
const loading = useSelector(loadingSelector);
// True when the logged-in user's warehouse is the package's final receiver
// (name keeps the codebase's existing spelling).
const finalWerehouse = auth.user.locationId === item?.resiverId?._id;
const onAccept = () => {
navigation.push("AcceptPackage");
};
// Forward the package to a different receiver.
const transmit = () => {
navigation.push("RedirectPackage");
};
// Mark the package as handed over for delivery, then collect driver details.
const send = () => {
const data = {
_id: item._id,
sendData: Date.now(),
sendUserId: auth?.user?.id || "",
status: "передано в доставку"
};
dispatch(changeUpdateItem(data));
navigation.push("DriverDetails");
};
const toggleItemList = () => {
setIsOpenItems(!isOpenItems);
};
const toggleTransitList = () => {
setIsOpenTransit(!isOpenTransit);
};
// Shown when the package failed to load at all.
const renderLoadError = () => (
<View>
<Text style={styles.err}>
{`Ошибка загрузки данных об отправлении,
повторите попытку`}
</Text>
<DebounceTouchbleOpacity onPress={() => navigation.pop(1)} delay={1000}>
<View style={styles.btn}>
<Text style={styles.btnText}>Вернуться к сканированию</Text>
</View>
</DebounceTouchbleOpacity>
</View>
);
const renderInventoryEmpty = () => (
<View>
<Text style={styles.err}>{`Список предметов пуст`}</Text>
</View>
);
const renderTransitEmpty = () => (
<View>
<Text style={styles.err}>{`Транзитные пункты не добавлены`}</Text>
</View>
);
// Chooses the action row by package status:
//  * accepted / delivered   -> informational message only;
//  * not yet sent           -> "send" button;
//  * in transit             -> "forward" (unless this is the final
//    warehouse) plus the appropriate "accept" button.
const renderButton = () => {
if (item.status === "accepted" || item.status === "доставлено") {
return (
<View>
<Text style={styles.err}>Комплект уже принят!</Text>
</View>
);
}
if (item.status === "notSent" || item.status === "не отправлено") {
return (
<DebounceTouchbleOpacity onPress={send}>
<View style={styles.btn}>
<Text style={styles.btnText}>Отправить</Text>
</View>
</DebounceTouchbleOpacity>
);
}
return (
<View style={styles.btnBlock}>
<View>
{!finalWerehouse && (
<DebounceTouchbleOpacity onPress={transmit}>
<View style={styles.btn}>
<Text style={styles.btnText}>Переслать</Text>
</View>
</DebounceTouchbleOpacity>
)}
</View>
<View>
<DebounceTouchbleOpacity onPress={onAccept}>
{!finalWerehouse ? (
<View style={styles.btn}>
<Text style={styles.btnText}>Принять на транзитный склад</Text>
</View>
) : (
<View style={styles.btn}>
<Text style={styles.btnText}>Принять и закончить маршрут</Text>
</View>
)}
</DebounceTouchbleOpacity>
</View>
</View>
);
};
// Collapsible list of the package's contents (title + count rows).
const renderInventory = () => (
<View>
<DebounceTouchbleOpacity onPress={toggleItemList}>
<View style={styles.listbtn}>
<Text style={styles.btnText}>
{isOpenItems
? "Cкрыть список предметов"
: "Показать список предметов"}
</Text>
<Ionicons name="md-list" size={32} color="#fff" />
</View>
</DebounceTouchbleOpacity>
{isOpenItems && (
<View>
<ScrollView>
<FlatList
data={item.inventory}
renderItem={({ item }) => (
<View style={styles.list}>
<View style={styles.titleItem}>
<Text style={styles.listText}>{item.title}</Text>
</View>
<View style={styles.countItem}>
<Text style={styles.listText}>{item.count}</Text>
</View>
</View>
)}
keyExtractor={item => item._id.toString()}
></FlatList>
</ScrollView>
</View>
)}
</View>
);
// Collapsible list of transit hops (sending location + timestamp).
const renderTransit = () => (
<View>
<DebounceTouchbleOpacity onPress={toggleTransitList}>
<View style={styles.listbtn}>
<Text style={styles.btnText}>
{isOpenTransit
? "Cкрыть транзитные пункты"
: "Показать транзитные пункты"}
</Text>
<Ionicons name="md-list" size={32} color="#fff" />
</View>
</DebounceTouchbleOpacity>
{isOpenTransit && item.transit?.length > 0 && (
<View>
<ScrollView>
<FlatList
data={item.transit}
renderItem={({ item }) => (
<View style={styles.list}>
<View style={styles.titleItemT}>
<Text style={styles.listText}>
{item.sendLocId && item.sendLocId.title}
</Text>
</View>
<View style={styles.countItemT}>
<Text style={styles.listText}>
{item.date &&
moment(item.date).format("DD.MM.YYYY hh:mm")}
</Text>
</View>
</View>
)}
keyExtractor={item => "transit_" + item._id}
></FlatList>
</ScrollView>
</View>
)}
</View>
);
// Network/selector error takes precedence over everything else.
if (error) {
return (
<Container style={{ justifyContent: "center" }}>
{error && (
<View>
<Text style={styles.err}>
{`При загрузки данных возникла ошибка,
проверте интернет соединение
и повторите попытку`}
</Text>
</View>
)}
</Container>
);
}
return (
<Container style={{ justifyContent: "center" }}>
{loading && <ActivityIndicator size="large" color="#fa000c" />}
{!loading && !item && renderLoadError()}
{!loading && item && (
// <View style={styles.contentInfo}>
<Content style={styles.contentInfo}>
<View style={styles.info}>
<Text style={styles.textheader}>Получатель:</Text>
<Text style={styles.text}>
{item.resiverId && item.resiverId.title}
</Text>
<Text style={styles.textheader}>Примечание:</Text>
<Text style={styles.text}>{item.note}</Text>
</View>
<View style={styles.listBlock}>
{item.inventory?.length
? renderInventory()
: renderInventoryEmpty()}
</View>
<View style={styles.listBlock}>
{item.transit?.length ? renderTransit() : renderTransitEmpty()}
</View>
<View style={styles.listBlock}>
<View style={styles.driverDetails}>
<Text style={styles.textheader}>Транспортные данные:</Text>
<Text style={styles.text}>
{`ФИО ${item.driverDetails.driverFullname}`}
</Text>
<Text style={styles.text}>
{`№ ${item.driverDetails.regNumber}`}
</Text>
</View>
</View>
</Content>
)}
<View style={styles.contentCenter}>
{!loading && item && renderButton()}
</View>
</Container>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "center",
margin: 50
},
contentCenter: {
display: "flex",
alignItems: "center",
justifyContent: "center"
},
contentInfo: {
flex: 1,
marginTop: 30,
marginBottom: 15,
paddingHorizontal: 20
},
info: {
backgroundColor: "#fa000c",
padding: 15
},
loading: {
width: 200,
height: 200
},
text: {
// textAlign: "center",
color: "#000",
fontFamily: "Roboto",
fontSize: 17,
fontWeight: "700"
},
textheader: {
// textAlign: "center",
color: "#fff",
fontWeight: "800",
fontFamily: "Roboto",
fontSize: 17
},
listBlock: {
flex: 1,
// textAlign: "center",
marginTop: 30,
minWidth: 300
},
list: {
flex: 1,
flexDirection: "row",
width: "100%",
borderBottomWidth: 1,
borderBottomColor: "#fff",
justifyContent: "space-between",
backgroundColor: "#fa000c",
padding: 10
},
titleItem: {
width: "70%"
},
countItem: {
width: "20%"
},
titleItemT: {
width: "55%"
},
countItemT: {
width: "40%"
},
listText: {
color: "#000",
fontFamily: "Roboto",
fontSize: 17
},
listbtn: {
display: "flex",
flexDirection: "row",
backgroundColor: "#fa000c",
justifyContent: "space-between",
alignItems: "center",
paddingVertical: 10,
paddingRight: 30,
paddingLeft: 10
},
btn: {
justifyContent: "center",
alignItems: "center",
backgroundColor: "#fa000c",
padding: 10,
borderWidth: 1,
borderColor: "#fff",
// margin: 3
},
btnText: {
color: "#fff",
fontFamily: "Roboto",
fontSize: 16
},
btnBlock: {
display: "flex",
width: "100%",
justifyContent: "center",
flexDirection: "row",
alignItems: "center",
paddingHorizontal: 20
},
err: {
color: "red",
textAlign: "center",
fontFamily: "Roboto",
fontSize: 16,
marginVertical: 20
},
driverDetails: {
alignItems: "flex-start",
paddingLeft: 10,
paddingVertical: 10,
backgroundColor: "#fa000c"
}
});
export default PackageInfo;
<file_sep>import {
call,
put,
takeLatest,
all,
fork,
spawn,
select
} from "redux-saga/effects";
import { AsyncStorage } from "react-native";
import { Notifications } from "expo";
import * as Permissions from "expo-permissions";
import api from "../../../api";
const LOG_IN = "@@auth/LOG_IN";
const LOG_IN_SUCCESS = "@@auth/LOG_IN_SUCCESS";
const LOG_IN_FAIL = "@@auth/LOG_IN_FAIL";
const LOG_OUT = "@@auth/LOG_OUT";
const GET_USER_INFO = "@@auth/GET_USER_INFO";
const GET_USER_INFO_SUCCESS = "@@auth/GET_USER_INFO_SUCCESS";
const GET_USER_INFO_FAIL = "@@auth/GET_USER_INFO_FAIL";
const initialState = {
loading: false,
user: {
username: "",
token: "",
role: "",
locationId: "",
id: ""
},
error: false
};
// reducer
export const reducer = (state = initialState, action) => {
const { type, payload } = action;
switch (type) {
case LOG_IN:
return state;
case LOG_IN_SUCCESS:
return { ...state, user: payload };
case LOG_IN_FAIL:
return { ...state, error: true };
case LOG_OUT:
return {
loading: false,
user: {
username: "",
token: "",
role: "",
locationId: "",
id: ""
},
error: false
};
case GET_USER_INFO:
return state;
case GET_USER_INFO_SUCCESS:
return { ...state, user: payload };
case GET_USER_INFO_FAIL:
return { ...state, error: true };
default:
return state;
}
};
// selectors
export const authSelector = state => state.auth;
// Action creators
export const loginStart = authData => ({ type: LOG_IN, payload: authData });
export const loginSuccess = successData => ({
type: LOG_IN_SUCCESS,
payload: successData
});
export const loginFail = error => ({ type: LOG_IN_FAIL, payload: { error } });
export const logout = () => ({ type: LOG_OUT });
export const getUserInfo = token => ({ type: GET_USER_INFO, payload: token });
export const getUserInfoSuccess = successData => ({
type: GET_USER_INFO_SUCCESS,
payload: successData
});
export const getUserInfoFail = error => ({
type: GET_USER_INFO_FAIL,
payload: { error }
});
// Sagas
export function* saga() {
yield all([fork(watchLoginSaga)]);
}
// wathers
// NOTE(review): despite its name, this watcher also handles logout,
// user-info and push-token registration.
function* watchLoginSaga() {
yield takeLatest(LOG_IN, loginSaga);
yield takeLatest(LOG_OUT, logoutSaga);
yield takeLatest(GET_USER_INFO, userInfoSaga);
yield takeLatest(LOG_IN_SUCCESS, sendExpoTockenSaga);
}
// Authenticate, persist the user to AsyncStorage and navigate to Home.
// NOTE(review): the Authorization scheme string used below is "Baerer" (sic)
// throughout the codebase — the backend presumably matches this exact
// spelling, so do not change it on the client alone.
function* loginSaga({ payload: { login, password, navigation } }) {
try {
const result = yield api.post("/users/login", { login, password });
if (!result) {
throw new Error("Forbidden");
}
const successData = {
loading: false,
user: {
username: result.data.username,
token: result.data.token,
role: result.data.role,
locationId: result.data.locationId,
id: result.data.id
},
error: false
};
// spawn: persisting to storage must not be cancelled with this saga.
yield spawn(setUserToAsyncStorageSaga, result.data);
yield put(loginSuccess(successData));
navigation.replace("Home");
} catch (e) {
yield put(loginFail(e.message));
}
}
// Logout wipes ALL of AsyncStorage, not just the user entry.
function* logoutSaga(action) {
yield AsyncStorage.clear();
}
function* setUserToAsyncStorageSaga(user) {
yield AsyncStorage.setItem("user", JSON.stringify(user));
}
// Restore the user for an existing token (e.g. on app start).
function* userInfoSaga(action) {
try {
api.defaults.headers.common.Authorization = `Baerer ${action.payload}`;
const result = yield api.post("/users/getByToken");
if (!result) {
throw new Error("Forbidden");
}
const successData = { ...result.data, token: action.payload };
yield put(getUserInfoSuccess(successData));
} catch (e) {
yield put(getUserInfoFail(e.message));
}
}
// After a successful login: ask for notification permission and register
// the device's Expo push token with the backend.
function* sendExpoTockenSaga() {
const { user } = yield select(authSelector)
const { status } = yield call(
[Permissions, "askAsync"],
Permissions.NOTIFICATIONS
);
if (status !== "granted") {
alert("нет разрешений на Push-уведомления");
} else {
let token = yield call([Notifications, "getExpoPushTokenAsync"]);
try {
api.defaults.headers.common.Authorization = `Baerer ${user.token}`;
const data = JSON.stringify({
token: token,
userId: user.id
});
yield api.post("/users/addExpoToken", data);
} catch (error) {
console.log(error);
}
}
}
<file_sep>import React, { useState } from "react";
import { StyleSheet, Text, View, TextInput, ActivityIndicator } from "react-native";
import DebounceTouchbleOpacity from './helpers/DebounceTouchbleOpacity'
import { useDispatch, useSelector } from "react-redux";
import { authSelector } from '../redux/reducers/auth'
import { packageSelector, errorSelector, updateLaodingSelector, acceptPackage } from '../redux/reducers/packages';
// Screen where a warehouse operator accepts a package, optionally attaching
// a free-text comment. Works both for transit acceptance and for closing
// the route at the final warehouse (the saga decides based on state).
const AcceptPackage = ({ navigation }) => {
const dispatch = useDispatch();
const item = useSelector(packageSelector);
const auth = useSelector(authSelector)
const error = useSelector(errorSelector);
const updateLaoding = useSelector(updateLaodingSelector)
// True when the user's warehouse is the package's final receiver.
// NOTE(review): computed but not used in this component — confirm whether
// it was meant to change the accept-button label here as in PackageInfo.
const finalWerehouse = auth.user.locationId === item?.resiverId?._id;
const [comment, setComment] = useState(item.comment || '');
// Dispatch the accept action; the saga navigates on success.
const onAccept = () => {
dispatch(acceptPackage(comment, navigation));
};
const cancel = () => {
navigation.navigate("PackageInfo");
};
// While the update is in flight, show only a spinner.
if (updateLaoding) {
return <View style={styles.contentCenter}>
<ActivityIndicator size="large" color="#fa000c" />
</View>
}
return (
<View style={styles.container}>
<View style={styles.contentInfo}>
<View style={styles.info}>
<Text style={styles.headTitle}>Комментарий:</Text>
<TextInput
style={styles.textInput}
multiline={true}
numberOfLines={30}
onChangeText={text => setComment(text)}
value={comment}
/>
</View>
</View>
<View style={styles.contentCenter}>
{error && (
<View>
<Text style={styles.err}>Ошибка обновления, повторите попытку</Text>
</View>
)}
</View>
<View style={styles.btnBlock}>
<View>
<DebounceTouchbleOpacity onPress={cancel}>
<View style={styles.btn}>
<Text style={styles.btnText}>Отменить</Text>
</View>
</DebounceTouchbleOpacity>
</View>
<View>
<DebounceTouchbleOpacity onPress={onAccept}>
<View style={styles.btn}>
<Text style={styles.btnText}>Принять</Text>
</View>
</DebounceTouchbleOpacity>
</View>
</View>
</View>
);
};
// Style table for AcceptPackage (red/white brand palette).
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "space-between",
margin: 30
},
contentCenter: {
flex: 1,
alignItems: "center",
justifyContent: "center"
},
contentInfo: {
marginTop: 30,
justifyContent: "space-between",
},
info: {
width: 300,
height: "70%"
},
loading: {
width: 200,
height: 200
},
btn: {
backgroundColor: "#fa000c",
marginHorizontal: 10,
padding: 10
},
btnText: {
color: "#fff",
fontFamily: 'Roboto',
fontSize: 16
},
btnBlock: {
justifyContent: "space-between",
flexDirection: "row",
alignItems: "center",
marginTop: 10
},
list: {
flexDirection: "row",
width: "100%",
borderBottomWidth: 1,
justifyContent: "space-between"
},
title: {
width: "70%"
},
count: {
width: "20%"
},
headTitle: {
fontWeight: "bold",
marginBottom: 10
},
textInput: {
// height: 400,
padding: 10,
textAlignVertical: "top",
borderColor: "#fa000c",
borderWidth: 1
},
err: {
color: "red",
textAlign: "center"
}
});
export default AcceptPackage;
<file_sep>import React from "react";
import { createAppContainer } from "react-navigation";
import { createStackNavigator } from "react-navigation-stack";
import {Provider} from 'react-redux';
import BarcodeScanner from "./components/BarCodeScaner";
import PackageInfo from "./components/PackageInfo";
import Home from "./components/Home";
import Auth from './components/Auth'
import AcceptPackage from "./components/AcceptPackage";
import RedirectPackage from "./components/RedirectPackage";
import ShowStatus from "./components/ShowStatus"
import DriverDetails from './components/DriverDetails';
import store from './redux/store';
// Stack navigator wiring every screen of the app; the built-in header is
// disabled globally (each screen renders its own chrome).
// NOTE(review): "Home" is the initial route by position — confirm the app
// redirects unauthenticated users to "Login".
const AppNavigator = createStackNavigator(
{
Home: {
screen: Home
},
Login: {
screen: Auth
},
BarcodeScanner: {
screen: BarcodeScanner
},
PackageInfo: {
screen: PackageInfo
},
AcceptPackage: {
screen: AcceptPackage
},
RedirectPackage: {
screen: RedirectPackage
},
ShowStatus: {
screen: ShowStatus
},
DriverDetails: {
screen: DriverDetails
},
},
{
defaultNavigationOptions: {
header: null
}
}
);
// Top-level container required by react-navigation.
const AppContainer = createAppContainer(AppNavigator);
export default class App extends React.Component {
render () {
return (
<Provider store={store}>
<AppContainer/>
</Provider>
)
}
}<file_sep>import React from "react";
import { StyleSheet, Text, View, TouchableOpacity } from "react-native";
import DebounceTouchbleOpacity from './helpers/DebounceTouchbleOpacity'
// Confirmation screen shown after a successful send; offers a single
// button back to the home screen.
export default function ShowStatus({ navigation }) {
// NOTE(review): handler name is a typo for "returnToHome" (local only).
const retunToHome = () => {
navigation.navigate("Home");
};
return (
<View style={styles.container}>
<View>
<Text style={styles.textMSG}>Успешно отправлено!</Text>
</View>
<View>
<DebounceTouchbleOpacity onPress={retunToHome}>
<View style={styles.btn}>
<Text style={styles.btnText}>Вернуться на главный экран</Text>
</View>
</DebounceTouchbleOpacity>
</View>
</View>
);
}
// Style table for ShowStatus: green success message, red branded button.
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "space-between",
paddingVertical:50
},
btn: {
display: "flex",
flexDirection: "row",
justifyContent: "center",
alignItems: "center",
width: 200,
height: 45,
borderWidth: 1,
backgroundColor: "#fa000c",
padding: 10,
borderColor: "#fff"
},
btnText: {
padding: 10,
color: "#fff",
textAlign: "center"
},
textMSG: {
marginTop: 100,
fontSize: 24,
color: "#148031",
fontWeight: "800"
}
});
|
8288f6febe067010ab6c39cd35efcf3bceb4966f
|
[
"JavaScript"
] | 13 |
JavaScript
|
DenisLebedinsky/Logistic-mobile-app
|
2f94747931dcc7649e26069537d7753fcff2c89b
|
c61adde167adce84c1aac0a91523b6d1061e09b7
|
refs/heads/main
|
<file_sep># OrderAutomation
This project is an order automation system for a supermarket. It was written in C# with an MSSQL database.
Written by <NAME> and <NAME>.
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
/// <summary>
/// Customer login form: validates the entered TC number / password against
/// the "musteri" table and opens the shopping screen on success.
/// Fixes: the SqlCommand and SqlDataReader are now disposed, the connection
/// is closed even when the query throws (try/finally), and the unused
/// SqlDataAdapter was removed.
/// </summary>
public partial class MusteriGiris : Form
{
    public MusteriGiris()
    {
        InitializeComponent();
    }

    // NOTE(review): the connection string is duplicated in every form;
    // consider centralising it in app.config.
    SqlConnection x = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");

    private void MusteriGiris_Load(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    private void txt_tcno_giris_TextChanged(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    // Return to the main menu without logging in.
    private void btn_anamenu_giris_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    // ID of the customer who logged in; read by the shopping form (Alisveris).
    static public string musteriIDsi;

    // Validate credentials; on success remember the customer id and open
    // the shopping form, otherwise show an error message.
    private void btn_musteri_giris_Click(object sender, EventArgs e)
    {
        const string giris = "SELECT * from musteri where tcno=@tcno AND sifre=@sifre";
        try
        {
            x.Open();
            using (SqlCommand komut = new SqlCommand(giris, x))
            {
                komut.Parameters.AddWithValue("@tcno", txt_tcno_giris.Text);
                komut.Parameters.AddWithValue("@sifre", txt_sifre_giris.Text);
                using (SqlDataReader r = komut.ExecuteReader())
                {
                    if (r.Read())
                    {
                        txt_tcno_giris.Text = r["tcno"].ToString();
                        txt_sifre_giris.Text = r["sifre"].ToString();
                        musteriIDsi = r["musteriID"].ToString();
                        Alisveris av = new Alisveris();
                        av.Show();
                        this.Close();
                    }
                    else
                        MessageBox.Show("Hatalı giriş yaptınız. Lütfen tekrar deneyiniz.");
                }
            }
        }
        finally
        {
            // Always release the connection, even if the query throws.
            x.Close();
        }
    }

    // Toggle password visibility ('\0' shows the characters).
    private void checkBox1_CheckedChanged(object sender, EventArgs e)
    {
        if (checkBox1.Checked)
        {
            txt_sifre_giris.PasswordChar = '\0';
        }
        else
        {
            txt_sifre_giris.PasswordChar = '*';
        }
    }

    private void txt_sifre_giris_TextChanged(object sender, EventArgs e)
    {
        // NOTE(review): this re-masks on every keystroke, overriding the
        // "show password" checkbox while typing — confirm this is intended.
        txt_sifre_giris.PasswordChar = '*';
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
/// <summary>
/// Main menu form: routes the user to customer registration, customer login
/// or manager login, or exits the application.
/// Fix: removed an unused SqlConnection field (this form runs no queries).
/// </summary>
public partial class AnaSayfa : Form
{
    public AnaSayfa()
    {
        InitializeComponent();
    }

    private void Form1_Load(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    // Open the customer registration form.
    private void btn_kayitgrs_Click(object sender, EventArgs e)
    {
        MusteriKayit kyt = new MusteriKayit();
        kyt.Show();
        this.Hide();
    }

    private void btn_cikis_Click(object sender, EventArgs e)
    {
        Application.Exit();
    }

    // Open the customer login form.
    private void btn_musgrs_Click(object sender, EventArgs e)
    {
        MusteriGiris grs = new MusteriGiris();
        grs.Show();
        this.Hide();
    }

    // Open the manager login form.
    private void btn_yoneticigrs_Click(object sender, EventArgs e)
    {
        YoneticiGiris yon = new YoneticiGiris();
        yon.Show();
        this.Hide();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
/// <summary>
/// Customer registration form: inserts a new row into the "musteri" table
/// (parameterized insert) and clears the input fields afterwards.
/// Fixes: the SqlCommand is now disposed and the connection is closed even
/// when the insert throws (try/finally).
/// </summary>
public partial class MusteriKayit : Form
{
    public MusteriKayit()
    {
        InitializeComponent();
    }

    // NOTE(review): duplicated connection string; consider app.config.
    SqlConnection x = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");

    private void label2_Click(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    private void textBox1_TextChanged(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    private void label1_Click(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    private void label5_Click(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }

    // Insert the new customer, confirm, then clear every text box.
    private void btn_kaydet_Click(object sender, EventArgs e)
    {
        try
        {
            x.Open();
            using (SqlCommand komut = new SqlCommand("insert into musteri(ad,soyad,adres,tcno,sifre) values(@ad,@soyad,@adres,@tcno,@sifre)", x))
            {
                komut.Parameters.AddWithValue("@ad", txt_ad_kayit.Text);
                komut.Parameters.AddWithValue("@soyad", txt_soyad_kayit.Text);
                komut.Parameters.AddWithValue("@adres", txt_adres_kayit.Text);
                komut.Parameters.AddWithValue("@tcno", txt_tc_kayit.Text);
                komut.Parameters.AddWithValue("@sifre", txt_sifre_kayit.Text);
                komut.ExecuteNonQuery();
            }
        }
        finally
        {
            // Always release the connection, even if the insert throws.
            x.Close();
        }
        MessageBox.Show("Müşteri kaydı eklendi.");
        foreach (Control item in this.Controls)
        {
            if (item is TextBox)
            {
                item.Text = "";
            }
        }
    }

    // Return to the main menu.
    private void btn_menuyedon_kayit_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    private void checkBox1_CheckedChanged(object sender, EventArgs e)
    {
        // Designer-wired handler; intentionally empty.
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
public partial class Alisveris : Form
{
    // Shopping form: lists all products, lets the active customer add
    // products to / remove them from the cart ("sepet" table), and keeps
    // the running cart total up to date.
    public Alisveris()
    {
        InitializeComponent();
    }

    SqlConnection n = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");
    DataSet daset = new DataSet();

    // Cart total formatted as "<sum>TL"; read by the payment form (Ödeme).
    static public string sepet_toplam;

    // Fills dataGridView2 with the active customer's cart rows.
    // All queries below are parameterized to prevent SQL injection via the
    // text boxes (they were previously built by string concatenation).
    private void SepetTablosu()
    {
        n.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from sepet where musteriID=@musteriID", n);
        adtr.SelectCommand.Parameters.AddWithValue("@musteriID", txt_aktifID.Text);
        adtr.Fill(daset, "sepet");
        dataGridView2.DataSource = daset.Tables["sepet"];
        // Hide the internal columns (row id, customer id, product id).
        dataGridView2.Columns[0].Visible = false;
        dataGridView2.Columns[5].Visible = false;
        dataGridView2.Columns[1].Visible = false;
        n.Close();
    }

    // Fills dataGridView1 with every product, hiding the raw product id.
    private void UrunlerTablosu()
    {
        n.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from urun", n);
        adtr.Fill(daset, "urun");
        dataGridView1.DataSource = daset.Tables["urun"];
        dataGridView1.Columns[0].Visible = false;
        n.Close();
    }

    private void Alisveris_Load(object sender, EventArgs e)
    {
        // The logged-in customer id is published by the login form.
        txt_aktifID.Text = MusteriGiris.musteriIDsi;
        UrunlerTablosu();
        SepetTablosu();
        SepetToplam();
    }

    // Recomputes the cart total and publishes it for the payment form.
    private void SepetToplam()
    {
        n.Open();
        SqlCommand komut = new SqlCommand("select sum(toplamfiyat) from sepet where musteriID=@musteriID", n);
        komut.Parameters.AddWithValue("@musteriID", txt_aktifID.Text);
        txt_sepet_tutari.Text = komut.ExecuteScalar() + "TL";
        n.Close();
        sepet_toplam = txt_sepet_tutari.Text;
    }

    // True when the selected product is NOT yet in the cart.
    bool test;

    private void IDkontrol()
    {
        test = true;
        n.Open();
        SqlCommand komut = new SqlCommand("select *from sepet where musteriID=@musteriID", n);
        komut.Parameters.AddWithValue("@musteriID", txt_aktifID.Text);
        using (SqlDataReader read = komut.ExecuteReader())
        {
            while (read.Read())
            {
                // BUGFIX: compare against the cart row's product id ("ID"),
                // not the customer id ("musteriID"), so a product already in
                // the cart is detected and its quantity updated instead of a
                // duplicate row being inserted.
                if (txt_ID.Text == read["ID"].ToString())
                {
                    test = false;
                }
            }
        }
        n.Close();
    }

    // Adds the selected product to the cart, or bumps the quantity of an
    // existing cart line.
    private void btn_sepet_Click(object sender, EventArgs e)
    {
        IDkontrol();
        // Treat an empty or zero quantity as one item.
        if (txt_ADET.Text == "0" || txt_ADET.Text == "")
        {
            txt_ADET.Text = "1";
        }
        if (test == true)
        {
            // Product not in the cart yet: insert a fresh row.
            n.Open();
            SqlCommand komut4 = new SqlCommand("insert into sepet(musteriID,Ad,Fiyat,Adet,ID,ToplamFiyat) values(@musteriID,@Ad,@Fiyat,@Adet,@ID,@ToplamFiyat)", n);
            komut4.Parameters.AddWithValue("@musteriID", txt_aktifID.Text);
            komut4.Parameters.AddWithValue("@Ad", txt_AD.Text);
            komut4.Parameters.AddWithValue("@Fiyat", float.Parse(txt_FIYAT.Text));
            komut4.Parameters.AddWithValue("@Adet", txt_ADET.Text);
            komut4.Parameters.AddWithValue("@ID", txt_ID.Text);
            komut4.Parameters.AddWithValue("@ToplamFiyat", float.Parse(txt_toplam_fiyat.Text));
            komut4.ExecuteNonQuery();
            n.Close();
        }
        else
        {
            // Product already in the cart: bump the quantity, then recompute
            // the line total from the stored unit price.
            n.Open();
            SqlCommand komut2 = new SqlCommand("update sepet set Adet=Adet+@adet where ID=@id", n);
            komut2.Parameters.AddWithValue("@adet", int.Parse(txt_ADET.Text));
            komut2.Parameters.AddWithValue("@id", txt_ID.Text);
            komut2.ExecuteNonQuery();
            SqlCommand komut3 = new SqlCommand("update sepet set toplamfiyat=Adet*Fiyat where ID=@id", n);
            komut3.Parameters.AddWithValue("@id", txt_ID.Text);
            komut3.ExecuteNonQuery();
            n.Close();
        }
        daset.Tables["sepet"].Clear();
        SepetTablosu();
        MessageBox.Show("Ürün sepete eklendi.");
        SepetToplam();
    }

    // The quantity box accepts digits and control keys only.
    private void txt_ADET_KeyPress(object sender, KeyPressEventArgs e)
    {
        e.Handled = !char.IsDigit(e.KeyChar) && !char.IsControl(e.KeyChar);
    }

    // Returns to the main menu.
    private void button5_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    // Double-clicking a product copies its id, name and price into the
    // selection text boxes used by btn_sepet_Click.
    private void dataGridView1_CellDoubleClick(object sender, DataGridViewCellEventArgs e)
    {
        txt_ID.Text = dataGridView1.CurrentRow.Cells["urunID"].Value.ToString();
        txt_AD.Text = dataGridView1.CurrentRow.Cells["urun_ad"].Value.ToString();
        txt_FIYAT.Text = dataGridView1.CurrentRow.Cells["urun_fiyat"].Value.ToString();
    }

    // Removes the selected cart row, then refreshes the grid and total.
    private void btn_sepet_cikar_Click(object sender, EventArgs e)
    {
        n.Open();
        SqlCommand komut = new SqlCommand("delete from sepet where sepetID=@sepetID", n);
        komut.Parameters.AddWithValue("@sepetID", dataGridView2.CurrentRow.Cells["sepetID"].Value.ToString());
        komut.ExecuteNonQuery();
        n.Close();
        daset.Tables["sepet"].Clear();
        SepetTablosu();
        MessageBox.Show("Ürün sepetten silindi.");
        SepetToplam();
    }

    // Keep the line total in sync while the quantity or price text changes;
    // parse failures (e.g. a momentarily empty box) are ignored on purpose.
    private void txt_ADET_TextChanged(object sender, EventArgs e)
    {
        try
        {
            txt_toplam_fiyat.Text = (float.Parse(txt_ADET.Text) * float.Parse(txt_FIYAT.Text)).ToString();
        }
        catch (Exception)
        {
        }
    }

    private void txt_FIYAT_TextChanged(object sender, EventArgs e)
    {
        try
        {
            txt_toplam_fiyat.Text = (float.Parse(txt_ADET.Text) * float.Parse(txt_FIYAT.Text)).ToString();
        }
        catch (Exception)
        {
        }
    }

    // Proceeds to the payment form.
    private void button4_Click(object sender, EventArgs e)
    {
        Ödeme git = new Ödeme();
        git.Show();
        this.Close();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
public partial class YoneticiPanel : Form
{
    // Administrator panel: add / update / delete rows of the "urun"
    // (product) table shown in dataGridView1.
    public YoneticiPanel()
    {
        InitializeComponent();
    }

    SqlConnection z = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");
    DataSet daset = new DataSet();

    private void YoneticiPanel_Load(object sender, EventArgs e)
    {
        Urun_Guncelle();
    }

    // (Re)loads the product grid from the database.
    private void Urun_Guncelle()
    {
        z.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from urun", z);
        adtr.Fill(daset, "urun");
        dataGridView1.DataSource = daset.Tables["urun"];
        z.Close();
    }

    // Double-clicking a row copies its fields into the edit boxes.
    private void dataGridView1_CellDoubleClick(object sender, DataGridViewCellEventArgs e)
    {
        txt_urun_id.Text = dataGridView1.CurrentRow.Cells["urunID"].Value.ToString();
        txt_urun_ad.Text = dataGridView1.CurrentRow.Cells["urun_ad"].Value.ToString();
        txt_urun_fiyat.Text = dataGridView1.CurrentRow.Cells["urun_fiyat"].Value.ToString();
        txt_urun_aciklama.Text = dataGridView1.CurrentRow.Cells["urun_aciklama"].Value.ToString();
    }

    // Updates the product whose id is in txt_urun_id, then refreshes.
    private void btn_guncelle_Click(object sender, EventArgs e)
    {
        z.Open();
        SqlCommand komut = new SqlCommand("update urun set urun_ad=@urun_ad,urun_fiyat=@urun_fiyat,urun_aciklama=@urun_aciklama where urunID=@urunID", z);
        komut.Parameters.AddWithValue("@urunID", txt_urun_id.Text);
        komut.Parameters.AddWithValue("@urun_ad", txt_urun_ad.Text);
        komut.Parameters.AddWithValue("@urun_fiyat", txt_urun_fiyat.Text);
        komut.Parameters.AddWithValue("@urun_aciklama", txt_urun_aciklama.Text);
        komut.ExecuteNonQuery();
        z.Close();
        daset.Tables["urun"].Clear();
        Urun_Guncelle();
        MessageBox.Show("Ürün bilgileri güncellendi.");
        TextBoxlariTemizle();
    }

    // Deletes the product selected in the grid.
    // Parameterized to avoid SQL injection (was string-concatenated SQL).
    private void btn_sil_Click(object sender, EventArgs e)
    {
        z.Open();
        SqlCommand komut = new SqlCommand("delete from urun where urunID=@urunID", z);
        komut.Parameters.AddWithValue("@urunID", dataGridView1.CurrentRow.Cells["urunID"].Value.ToString());
        komut.ExecuteNonQuery();
        z.Close();
        MessageBox.Show("Kayıt silindi.");
        daset.Tables["urun"].Clear();
        Urun_Guncelle();
        TextBoxlariTemizle();
    }

    // Inserts a new product from the edit boxes, then refreshes.
    private void btn_ekle_Click(object sender, EventArgs e)
    {
        z.Open();
        SqlCommand komut = new SqlCommand("insert into urun(urun_ad,urun_fiyat,urun_aciklama) values(@urun_ad,@urun_fiyat,@urun_aciklama)", z);
        komut.Parameters.AddWithValue("@urun_ad", txt_urun_ad.Text);
        komut.Parameters.AddWithValue("@urun_fiyat", txt_urun_fiyat.Text);
        komut.Parameters.AddWithValue("@urun_aciklama", txt_urun_aciklama.Text);
        komut.ExecuteNonQuery();
        z.Close();
        daset.Tables["urun"].Clear();
        Urun_Guncelle();
        MessageBox.Show("Ürün kaydı eklendi.");
        TextBoxlariTemizle();
    }

    // Clears every TextBox directly contained by the form (this loop was
    // duplicated in three handlers; extracted into one helper).
    private void TextBoxlariTemizle()
    {
        foreach (Control item in this.Controls)
        {
            if (item is TextBox)
            {
                item.Text = "";
            }
        }
    }

    // Returns to the main menu.
    private void btn_panel_anamenu_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    // Opens the customer tracking screen.
    private void btn_mus_takip_Click(object sender, EventArgs e)
    {
        MusteriTakip takip = new MusteriTakip();
        takip.Show();
        this.Close();
    }

    private void dataGridView1_CellContentClick(object sender, DataGridViewCellEventArgs e)
    {
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
public partial class YoneticiGiris : Form
{
    // Administrator login: a single password check against the "yonetici"
    // table opens the admin panel.
    public YoneticiGiris()
    {
        InitializeComponent();
    }

    SqlConnection x = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");

    // Returns to the main menu.
    private void btn_anamenu_yonetici_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    // Opens the admin panel when a row matches the typed password.
    // Cleanup: removed an unused SqlDataAdapter, disposed the reader so the
    // connection closes cleanly, and no longer echoes the stored password
    // back into the textbox.
    private void btn_giris_yonetici_Click(object sender, EventArgs e)
    {
        x.Open();
        string giris = "SELECT * from yonetici where yon_sifre=@yon_sifre";
        SqlCommand komut = new SqlCommand(giris, x);
        komut.Parameters.AddWithValue("@yon_sifre", txt_sifre_yonetici.Text);
        using (SqlDataReader r = komut.ExecuteReader())
        {
            if (r.Read())
            {
                YoneticiPanel y = new YoneticiPanel();
                y.Show();
                this.Close();
            }
            else
                MessageBox.Show("Hatalı giriş yaptınız. Lütfen tekrar deneyiniz.");
        }
        x.Close();
    }

    // BUGFIX: previously re-masked on every keystroke even while the
    // "show password" checkbox was checked, defeating the toggle below.
    private void txt_sifre_yonetici_TextChanged_1(object sender, EventArgs e)
    {
        if (!checkBox1.Checked)
        {
            txt_sifre_yonetici.PasswordChar = '*';
        }
    }

    // Toggles password visibility ('\0' disables masking).
    private void checkBox1_CheckedChanged(object sender, EventArgs e)
    {
        txt_sifre_yonetici.PasswordChar = checkBox1.Checked ? '\0' : '*';
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
public partial class Ödeme : Form
{
    // Checkout form: copies the active customer's cart rows ("sepet") into
    // the sales table ("satisveri") with payment method, date and delivery
    // address, then empties the cart.
    public Ödeme()
    {
        InitializeComponent();
    }

    SqlConnection d = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");
    DataSet daset = new DataSet();

    // Id of the logged-in customer, published by the customer login form.
    int aktifID = int.Parse(MusteriGiris.musteriIDsi);
    // Payment method label chosen via the radio buttons below.
    string odemeyontem;
    // Delivery address looked up from the "musteri" table.
    string adresbilgi;
    // Scratch fields holding the cart row currently being copied.
    string urnAd;
    int urnAdet;
    float urnFiyat;
    float urnTFiyat;

    // Reads the customer's stored address into adresbilgi.
    // (aktifID is an int parsed from the login id, so the concatenation
    // cannot carry injected SQL.)
    private void AdresBul()
    {
        d.Open();
        SqlCommand komut = new SqlCommand("Select adres from musteri where musteriID='" + aktifID + "'", d);
        adresbilgi = (komut.ExecuteScalar()).ToString();
        d.Close();
    }

    private void Ödeme_Load(object sender, EventArgs e)
    {
        // Payment-detail inputs stay disabled until a method is selected.
        cmb_odeme.Enabled = false;
        txt_kartno.Enabled = false;
        cmb_ay.Enabled = false;
        cmb_yıl.Enabled = false;
        txt_cvv.Enabled = false;
        cmb_bankaadi.Enabled = false;
        txt_cek.Enabled = false;
        // Total computed and published by the shopping form.
        txt_odenecek_tutar.Text = Alisveris.sepet_toplam;
    }

    // "Cash on delivery" selected.
    private void rdb_kapıda_CheckedChanged(object sender, EventArgs e)
    {
        if(rdb_kapıda.Checked==true)
        {
            odemeyontem = "Kapıda Ödeme";
        }
    }

    // "Credit/debit card" selected: enable card fields only while checked.
    private void rdb_kart_CheckedChanged(object sender, EventArgs e)
    {
        if(rdb_kart.Checked==true)
        {
            cmb_odeme.Enabled = true;
            txt_kartno.Enabled = true;
            cmb_ay.Enabled = true;
            cmb_yıl.Enabled = true;
            txt_cvv.Enabled = true;
            odemeyontem = "Kredi/Banka Kartı";
        }
        else
        {
            cmb_odeme.Enabled = false;
            txt_kartno.Enabled = false;
            cmb_ay.Enabled = false;
            cmb_yıl.Enabled = false;
            txt_cvv.Enabled = false;
        }
    }

    // "Cheque" selected: enable cheque fields only while checked.
    private void rdb_cek_CheckedChanged(object sender, EventArgs e)
    {
        if(rdb_cek.Checked==true)
        {
            cmb_bankaadi.Enabled = true;
            txt_cek.Enabled = true;
            odemeyontem = "Çekle Ödeme";
        }
        else
        {
            cmb_bankaadi.Enabled = false;
            txt_cek.Enabled = false;
        }
    }

    // Back to the shopping form without completing the order.
    private void btn_geri_don_Click(object sender, EventArgs e)
    {
        Alisveris don = new Alisveris();
        don.Show();
        this.Close();
    }

    // Finalizes the order: counts the customer's cart rows, copies each row
    // into "satisveri" one by one, clears the cart, and returns to the
    // shopping form.
    private void btn_tamamla_Click(object sender, EventArgs e)
    {
        DateTime urnTarih = DateTime.Now;
        // i ends up as (number of cart rows + 1); the copy loop below runs
        // for j = 1 .. i-1.
        int i = 1;
        AdresBul();
        d.Open();
        SqlCommand komut = new SqlCommand("select *from sepet where musteriID='" + aktifID + "'", d);
        SqlDataReader read = komut.ExecuteReader();
        while (read.Read())
        {
            if (Convert.ToInt32(read["musteriID"]) == aktifID)
            {
                i++;
            }
        }
        d.Close();
        // For each cart row index j, re-read the whole cart and stop at the
        // j-th row (h is the running row counter), then insert that row into
        // the sales table.
        // NOTE(review): this is O(n^2) database round trips; a single reader
        // pass could collect all rows — left as-is to preserve behavior.
        for (int j = 1; j < i; j++)
        {
            d.Open();
            SqlCommand komut3 = new SqlCommand("select *from sepet where musteriID='" + aktifID + "'", d);
            SqlDataReader read2 = komut3.ExecuteReader();
            int h = 1;
            while (read2.Read())
            {
                if (j == h)
                {
                    urnAd = read2["Ad"].ToString();
                    urnAdet = Convert.ToInt32(read2["Adet"]);
                    urnFiyat = Convert.ToSingle(read2["Fiyat"]);
                    urnTFiyat = Convert.ToSingle(read2["toplamfiyat"]);
                    break;
                }
                h++;
            }
            d.Close();
            d.Open();
            SqlCommand komut2 = new SqlCommand("insert into satisveri(musteriID,urunAD,urunADET,urunFIYAT,urunTFIYAT,urunTARIH,urunODEME,urunADRES) values(@musteriID,@urunAD,@urunADET,@urunFIYAT,@urunTFIYAT,@urunTARIH,@urunODEME,@urunADRES)", d);
            komut2.Parameters.AddWithValue("@musteriID", aktifID);
            komut2.Parameters.AddWithValue("@urunAD", urnAd);
            komut2.Parameters.AddWithValue("@urunADET", urnAdet);
            komut2.Parameters.AddWithValue("@urunFIYAT", urnFiyat);
            komut2.Parameters.AddWithValue("@urunTFIYAT", urnTFiyat);
            komut2.Parameters.AddWithValue("@urunTARIH", urnTarih);
            komut2.Parameters.AddWithValue("@urunODEME", odemeyontem);
            komut2.Parameters.AddWithValue("@urunADRES", adresbilgi);
            komut2.ExecuteNonQuery();
            d.Close();
        }
        SepetTemizle();
        if (rdb_kapıda.Checked == true)
        {
            MessageBox.Show("Siparişiniz alındı.\nBizi tercih ettiğiniz için teşekkür ederiz.");
        }
        else
        {
            MessageBox.Show("Ödeme başarılı.\nBizi tercih ettiğiniz için teşekkür ederiz.");
        }
        Alisveris don = new Alisveris();
        don.Show();
        this.Close();
    }

    // Deletes every cart row belonging to the active customer.
    private void SepetTemizle()
    {
        d.Open();
        SqlCommand komut4 = new SqlCommand("delete from sepet where musteriID='" + aktifID + "'",d);
        komut4.ExecuteNonQuery();
        d.Close();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace siparisotomasyonu
{
public partial class MusteriTakip : Form
{
    // Administrator view: lists customers and, on double-click, the
    // selected customer's purchase history from "satisveri".
    public MusteriTakip()
    {
        InitializeComponent();
    }

    SqlConnection b = new SqlConnection("Server=DESKTOP-R5P3ANL\\SQLEXPRESS;Database=siparisotomasyonu;trusted_connection=true;");
    DataSet daset = new DataSet();

    private void MusteriTakip_Load(object sender, EventArgs e)
    {
        MusteriGuncelle();
    }

    // Clears the purchase-history grid before a new customer is shown.
    // Queries are parameterized (previously string-concatenated SQL).
    private void Temizle()
    {
        b.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from satisveri where musteriID=@musteriID", b);
        adtr.SelectCommand.Parameters.AddWithValue("@musteriID", txt_musid.Text);
        adtr.Fill(daset, "satisveri");
        daset.Tables["satisveri"].Clear();
        dataGridView2.DataSource = daset.Tables["satisveri"];
        b.Close();
    }

    // Loads the purchase history for the customer id in txt_musid.
    private void AlisverisListele()
    {
        b.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from satisveri where musteriID=@musteriID", b);
        adtr.SelectCommand.Parameters.AddWithValue("@musteriID", txt_musid.Text);
        adtr.Fill(daset, "satisveri");
        dataGridView2.DataSource = daset.Tables["satisveri"];
        b.Close();
    }

    // (Re)loads the customer grid.
    private void MusteriGuncelle()
    {
        b.Open();
        SqlDataAdapter adtr = new SqlDataAdapter("select *from musteri", b);
        adtr.Fill(daset, "musteri");
        dataGridView1.DataSource = daset.Tables["musteri"];
        b.Close();
    }

    // Deletes the customer selected in the grid (parameterized DELETE).
    private void button3_Click(object sender, EventArgs e)//MÜŞTERİ SİL BUTONU
    {
        b.Open();
        SqlCommand komut = new SqlCommand("delete from musteri where musteriID=@musteriID", b);
        komut.Parameters.AddWithValue("@musteriID", dataGridView1.CurrentRow.Cells["musteriID"].Value.ToString());
        komut.ExecuteNonQuery();
        b.Close();
        MessageBox.Show("Müşteri silindi.");
        daset.Tables["musteri"].Clear();
        MusteriGuncelle();
        foreach (Control item in this.Controls)
        {
            if (item is TextBox)
            {
                item.Text = "";
            }
        }
    }

    // Returns to the main menu.
    private void btn_menuye_don_Click(object sender, EventArgs e)
    {
        AnaSayfa don = new AnaSayfa();
        don.Show();
        this.Close();
    }

    // Returns to the administrator panel.
    private void btn_panele_don_Click(object sender, EventArgs e)
    {
        YoneticiPanel panel = new YoneticiPanel();
        panel.Show();
        this.Close();
    }

    // Double-clicking a customer row shows that customer's purchases.
    private void dataGridView1_CellDoubleClick(object sender, DataGridViewCellEventArgs e)
    {
        Temizle();
        txt_musid.Text = dataGridView1.CurrentRow.Cells["musteriID"].Value.ToString();
        AlisverisListele();
    }
}
}
|
8ee3333fe1359df13acba8b94f6391869147f020
|
[
"Markdown",
"C#"
] | 9 |
Markdown
|
omerbilge35/OrderAutomation
|
081aa585e5a55b77186b26bcf98444ca9c82828e
|
99d4b5c12208fb92a0d752b00d21bb64bc772437
|
refs/heads/master
|
<file_sep>import React from 'react';
import { confirmAlert } from 'react-confirm-alert';
import { Link } from 'react-router-dom';
import memoriesData from '../../../helpers/data/memoriesData';
import './SingleMemory.scss';
import 'react-confirm-alert/src/react-confirm-alert.css';
// Detail page for one memory: shows its fields, links back to the parent
// trip, and offers edit/delete (delete is guarded by a confirm dialog).
class SingleMemory extends React.Component {
  state = {
    memory: {},
  }

  // Loads the memory identified by the :memoryId route parameter.
  componentDidMount() {
    const { memoryId } = this.props.match.params;
    memoriesData.getSingleMemory(memoryId)
      .then((response) => this.setState({ memory: response.data }))
      .catch((err) => console.error('could not get single memory:', err));
  }

  // Deletes this memory, then navigates back to its parent trip's page.
  deleteMemory = () => {
    const memoryForDelete = this.props.match.params.memoryId;
    const parentTripId = this.state.memory.tripId;
    memoriesData.deleteSingleMemory(memoryForDelete)
      .then(() => {
        this.props.history.push(`/trips/${parentTripId}`);
      })
      .catch((err) => console.error('could not delete this memory:', err));
  };

  // Opens the confirmation dialog; deleteMemory runs only on "Yes".
  submit = () => {
    const { memory } = this.state;
    confirmAlert({
      title: 'Wait a second...',
      message: `You want to delete "${memory.name}"?`,
      buttons: [
        {
          className: 'confirm-btn-yes',
          label: 'Yes, delete it',
          onClick: this.deleteMemory,
        },
        {
          className: 'confirm-btn-no',
          label: 'Never mind',
        },
      ],
    });
  };

  render() {
    const { memory } = this.state;
    const { memoryId } = this.props.match.params;
    const singleTripLink = `/trips/${memory.tripId}`;
    const editMemoryLink = `/memories/edit/${memoryId}`;
    return (
      <div className="SingleMemory col-12">
        <h1>{memory.name}</h1>
        <h4 className="font-bubblegum">{memory.location} - {memory.date}</h4>
        <h4 className="font-cyan">{memory.notes}</h4>
        <img className="col-sm-6" src={memory.imageUrl} alt="memory"/>
        <div className="containter button-container col-12"><br/>
          <Link className="btn button-acid mr10 mb10 font-marker" to={singleTripLink}><i className="fas fa-backward"></i> Go Back</Link>
          <Link className="btn button-cyan mr10 mb10 font-marker" to={editMemoryLink}><i className="far fa-edit"></i> Edit Details</Link>
          <button className="btn button-purple mb10 font-marker" onClick={this.submit}><i className="far fa-trash-alt"></i> Delete This Memory</button>
        </div>
      </div>
    );
  }
}

export default SingleMemory;
<file_sep>import React from 'react';
import authData from '../../../helpers/data/authData';
import tripsData from '../../../helpers/data/tripsData';
import TripCard from '../../shared/TripCard/TripCard';
import './Trips.scss';
class Trips extends React.Component {
state = {
trips: [],
}
getTrips = () => {
const uid = authData.getUid();
tripsData.getTripsByUid(uid)
.then((trips) => this.setState({ trips }))
.catch((err) => console.error('could not get trips', err));
};
componentDidMount() {
this.getTrips();
}
render() {
const { trips } = this.state;
const buildTrips = trips.map((trip) => (
<TripCard key={trip.id} trip={trip}/>
));
return (
<div className="Trips col-12">
<h1>My Trips</h1>
<div className="d-flex flex-wrap col-12">
{buildTrips}
</div>
</div>
);
}
}
export default Trips;
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import tripShape from '../../../helpers/propz/tripShape';
import './TripCard.scss';
class TripCard extends React.Component {
static propTypes = {
trip: tripShape.tripShape,
}
render() {
const { trip } = this.props;
const singleLink = `/trips/${trip.id}`;
return (
<div className="TripCard col-sm-4">
<Link className="card custom-card" to={singleLink}>
<div className="card-body low-pad">
<h5 className="card-title">{trip.name}</h5>
<p className="card-text">{trip.startDate}</p>
</div>
</Link>
</div>
);
}
}
export default TripCard;
<file_sep>import axios from 'axios';
import firebaseConfig from '../apiKeys.json';
const baseUrl = firebaseConfig.firebaseKeys.databaseURL;
// Fetches all trips owned by `uid`, normalizing Firebase's keyed-object
// response into an array where each trip carries its Firebase key as `id`.
// Axios failures propagate to the caller as a rejected promise.
const getTripsByUid = (uid) => axios
  .get(`${baseUrl}/trips.json?orderBy="uid"&equalTo="${uid}"`)
  .then((response) => {
    const fbTrips = response.data || {};
    return Object.keys(fbTrips).map((fbId) => {
      fbTrips[fbId].id = fbId;
      return fbTrips[fbId];
    });
  });

// Thin CRUD wrappers around the /trips collection.
const getSingleTrip = (tripId) => axios.get(`${baseUrl}/trips/${tripId}.json`);

const postNewTrip = (newTrip) => axios.post(`${baseUrl}/trips.json`, newTrip);

const deleteTrip = (tripId) => axios.delete(`${baseUrl}/trips/${tripId}.json`);

const updateTrip = (tripId, updatedTripObj) => axios.put(`${baseUrl}/trips/${tripId}.json`, updatedTripObj);

export default {
  getTripsByUid,
  getSingleTrip,
  postNewTrip,
  deleteTrip,
  updateTrip,
};
<file_sep># NSS Frontend Capstone: "RE:visit"
### Requirements:
- [x] React - *Creation and Routing*
- [x] Firebase - *Authentication, Database, and Hosting*
- [x] Github - *Version Control and Project Planning*
- [x] Display comprehension and mastery of technologies from the first 6 months of frontend developer bootcamp
### Technologies Used:
> - [HTML5](https://developer.mozilla.org/en-US/docs/Web/HTML)
> - [CSS](https://developer.mozilla.org/en-US/docs/Glossary/CSS), [Sass](https://sass-lang.com/)
> - [JavaScript](https://developer.mozilla.org/en-US/docs/Glossary/JavaScript), [JSX]()
> - [Bootstrap](https://getbootstrap.com/), [Reactstrap](https://reactstrap.github.io/)
> - [Moment.js](https://momentjs.com/)
> - [React Datepicker](https://github.com/Hacker0x01/react-datepicker)
> - [React Confirm Alert](https://github.com/GA-MO/react-confirm-alert)
### Description:
##### "Your Travel Scrapbook"
> Users access their account with Google authenticated login.
> Create Trips with user-customized names and dates, and then add Memories, like a scrapbook. Memories can be Adventures, Food & Drink, Photos, or Notes.
> Users can edit or delete their Trips and Memories.
#### Landing Page for Unauthenticated Users
<img src="src/screenshots/LoginPage.png" width="300">
#### Dashboard for Authenticated Users (navigation open)
<img src="src/screenshots/DashboardWithOpenToggle.png" width="300">
#### Single Trip (with associated Memories)
<img src="src/screenshots/SingleTrip.png" width="300">
#### Single Memory details
<img src="src/screenshots/SingleMemory.png" width="300">
#### Adding a new Trip
<img src="src/screenshots/NewTrip.png" width="300">
#### Adding a new Memory
<img src="src/screenshots/NewMemory.png" width="300">
#### Delete confirmation
<img src="src/screenshots/DeleteConfirm.png" width="300">
### Instructions to Run:
[Click here to view the deployed web app](https://re-visit-app.web.app/)
Or, Run Locally:
1. If you do not have npm http-server installed, follow instructions [here](https://www.npmjs.com/package/http-server) to install on your device
1. Use GitHub's [Cloning Feature](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository) to download a local copy of the files for this project
1. In your command line interface, change directory into the folder that contains your copied files
1. Enter command: `npm install` and wait for completion
1. Enter command: `npm start`
1. The project will automatically render in your browser at url: `http://localhost:8080`
### Specs:
Instructor: [<NAME>](https://github.com/zoeames)
[Nashville Software School](https://github.com/nashville-software-school)
<file_sep>import axios from 'axios';
import firebaseConfig from '../apiKeys.json';
const baseUrl = firebaseConfig.firebaseKeys.databaseURL;
// Fetches all memory categories, normalizing Firebase's keyed-object
// response into an array where each category carries its key as `id`.
const getCategories = () => new Promise((resolve, reject) => {
  axios.get(`${baseUrl}/categories.json`)
    .then((response) => {
      const categories = response.data;
      const catsArray = [];
      if (categories) {
        Object.keys(categories).forEach((catId) => {
          categories[catId].id = catId;
          catsArray.push(categories[catId]);
        });
      }
      resolve(catsArray);
    })
    .catch((err) => {
      // BUGFIX: previously the error was only logged and neither resolve nor
      // reject was called, so the returned promise never settled and any
      // awaiting caller hung forever. Reject so callers can handle failure.
      console.error('could not get categories', err);
      reject(err);
    });
});

export default { getCategories };
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import memoryShape from '../../../helpers/propz/memoryShape';
import './MemoryCard.scss';
class MemoryCard extends React.Component {
static propTypes = {
memory: memoryShape.memoryShape,
}
render() {
const { memory } = this.props;
const singleMemLink = `/memories/${memory.id}`;
return (
<div className="MemoryCard col-sm-6">
<Link className="card custom-card" to={singleMemLink}>
<div className="card-body row pad-zero">
<div className="col-2 pad-zero">
<h3>
{(() => {
switch (this.props.memory.categoryId) {
case 'category1': return <i className="fas fa-utensils"></i>;
case 'category2': return <i className="fas fa-hiking"></i>;
case 'category3': return <i className="fas fa-camera"></i>;
default: return <i className="far fa-comment-dots"></i>;
}
})()}
</h3>
</div>
<div className="col-10 pad-zero">
<h3 className="card-title">{memory.name}</h3>
</div>
</div>
</Link>
</div>
);
}
}
export default MemoryCard;
<file_sep>import React from 'react';
import { NavLink as RRNavLink } from 'react-router-dom';
import {
Collapse,
Navbar,
NavbarToggler,
NavbarBrand,
Nav,
NavItem,
NavLink,
} from 'reactstrap';
import PropTypes from 'prop-types';
import firebase from 'firebase/app';
import 'firebase/auth';
import './ThisNavbar.scss';
// Top navigation bar. Shows trip links + logout when authenticated,
// otherwise a single login link; collapses behind a toggler on small screens.
class ThisNavbar extends React.Component {
  static propTypes = {
    authed: PropTypes.bool.isRequired,
  }

  state = {
    // Whether the collapsed (mobile) menu is expanded.
    isOpen: false,
  }

  // Signs the current user out of Firebase.
  logUserOut = (e) => {
    e.preventDefault();
    firebase.auth().signOut();
  }

  // Starts the Google popup sign-in flow.
  logUserIn = (e) => {
    e.preventDefault();
    const provider = new firebase.auth.GoogleAuthProvider();
    firebase.auth().signInWithPopup(provider);
  }

  // Expands/collapses the mobile menu.
  toggle = () => {
    this.setState({ isOpen: !this.state.isOpen });
  }

  render() {
    const { isOpen } = this.state;
    // Builds the right-hand nav items based on authentication state.
    const buildNavbar = () => {
      const { authed } = this.props;
      if (authed) {
        return (
          <Nav className="ml-auto" navbar>
            <NavItem>
              <NavLink tag={RRNavLink} to='/trips'><i className="far fa-compass"></i> Trips</NavLink>
            </NavItem>
            <NavItem>
              <NavLink tag={RRNavLink} to='/trips/new'><i className="fas fa-map-pin"></i> New Trip</NavLink>
            </NavItem>
            <NavItem>
              <NavLink onClick={this.logUserOut}><i className="fas fa-sign-out-alt"></i> Logout</NavLink>
            </NavItem>
          </Nav>
        );
      }
      return (
        <Nav className="ml-auto" navbar>
          <NavItem>
            <NavLink onClick={this.logUserIn}><i className="fas fa-sign-in-alt"></i> Login</NavLink>
          </NavItem>
        </Nav>
      );
    };
    return (
      <div className="ThisNavbar">
        <Navbar className="navbar-custom" expand="sm">
          <NavbarBrand href="/">RE:visit</NavbarBrand>
          <NavbarToggler className="navbar-dark" onClick={this.toggle} />
          <Collapse isOpen={isOpen} navbar>
            {buildNavbar()}
          </Collapse>
        </Navbar>
      </div>
    );
  }
}

export default ThisNavbar;
<file_sep>import PropTypes from 'prop-types';
// PropTypes contract for one trip record: display name, start/end date
// strings, and the owning user's uid.
const tripShape = PropTypes.shape({
  name: PropTypes.string.isRequired,
  startDate: PropTypes.string.isRequired,
  endDate: PropTypes.string.isRequired,
  uid: PropTypes.string.isRequired,
});

export default { tripShape };
<file_sep>import React from 'react';
import firebase from 'firebase/app';
import 'firebase/auth';
import './Auth.scss';
class Auth extends React.Component {
logUserIn = (e) => {
e.preventDefault();
const provider = new firebase.auth.GoogleAuthProvider();
firebase.auth().signInWithPopup(provider);
}
render() {
return (
<div className="Auth col-12">
<h1>RE:visit</h1>
<h2 className="font-marker font-cyan">Your travel scrapbook</h2>
<h4 className="font-bubblegum">RE:visit is designed for capturing special memories from vacations, adventures, and trips.</h4>
<h4 className="font-bubblegum">Use the Login button below to get started</h4>
<button className="btn button-acid" onClick={this.logUserIn}>Login with Google</button>
</div>
);
}
}
export default Auth;
<file_sep>import React from 'react';
import firebase from 'firebase/app';
import 'firebase/auth';
import {
BrowserRouter,
Route,
Redirect,
Switch,
} from 'react-router-dom';
import './App.scss';
import ThisNavbar from '../components/shared/ThisNavbar/ThisNavbar';
import Auth from '../components/pages/Auth/Auth';
import EditMemory from '../components/pages/EditMemory/EditMemory';
import EditTrip from '../components/pages/EditTrip/EditTrip';
import NewMemory from '../components/pages/NewMemory/NewMemory';
import NewTrip from '../components/pages/NewTrip/NewTrip';
import SingleMemory from '../components/pages/SingleMemory/SingleMemory';
import SingleTrip from '../components/pages/SingleTrip/SingleTrip';
import Trips from '../components/pages/Trips/Trips';
import fbConnection from '../helpers/data/connection';
fbConnection();
const PublicRoute = ({ component: Component, authed, ...rest }) => {
const routeChecker = (props) => (authed === false
? (<Component {...props} />)
: (<Redirect to={{ pathname: '/home', state: { from: props.location } }} />));
return <Route {...rest} render={(props) => routeChecker(props)} />;
};
const PrivateRoute = ({ component: Component, authed, ...rest }) => {
const routeChecker = (props) => (authed === true
? (<Component {...props} />)
: (<Redirect to={{ pathname: '/auth', state: { from: props.location } }} />));
return <Route {...rest} render={(props) => routeChecker(props)} />;
};
// Application root: tracks Firebase auth state and declares all routes,
// gated by the PublicRoute/PrivateRoute wrappers above.
class App extends React.Component {
  state = {
    authed: false,
  }

  // Subscribes to Firebase auth changes; the returned unsubscribe function
  // is kept so the listener can be removed on unmount.
  componentDidMount() {
    this.removeListener = firebase.auth().onAuthStateChanged((user) => {
      if (user) {
        this.setState({ authed: true });
      } else {
        this.setState({ authed: false });
      }
    });
  }

  componentWillUnmount() {
    this.removeListener();
  }

  render() {
    const { authed } = this.state;
    return (
      <div className="App">
        <BrowserRouter>
          <React.Fragment>
            <ThisNavbar authed={authed}/>
            <div className="container col-12">
              <div className="col-12">
                {/* Order matters: more specific /trips/* paths must precede
                    the bare /trips route; unknown paths fall through to
                    /trips (which redirects to /auth when signed out). */}
                <Switch>
                  <PrivateRoute path='/trips/edit/:tripId' component={EditTrip} authed={authed} />
                  <PrivateRoute path='/trips/new' component={NewTrip} authed={authed} />
                  <PrivateRoute path='/trips/:tripId' component={SingleTrip} authed={authed} />
                  <PrivateRoute path='/trips' component={Trips} authed={authed} />
                  <PrivateRoute path='/memories/new' component={NewMemory} authed={authed} />
                  <PrivateRoute path='/memories/edit/:memoryId' component={EditMemory} authed={authed} />
                  <PrivateRoute path='/memories/:memoryId' component={SingleMemory} authed={authed} />
                  <PublicRoute path='/auth' component={Auth} authed={authed} />
                  <Redirect from="*" to="/trips"/>
                </Switch>
              </div>
            </div>
          </React.Fragment>
        </BrowserRouter>
      </div>
    );
  }
}

export default App;
<file_sep>import React from 'react';
import { confirmAlert } from 'react-confirm-alert';
import { Link } from 'react-router-dom';
import MemoryCard from '../../shared/MemoryCard/MemoryCard';
import tripsData from '../../../helpers/data/tripsData';
import memoriesData from '../../../helpers/data/memoriesData';
import './SingleTrip.scss';
import 'react-confirm-alert/src/react-confirm-alert.css';
// Detail page for one trip: shows its memories, links to add/edit, and
// offers a confirm-guarded delete that also removes the trip's memories.
class SingleTrip extends React.Component {
  state = {
    trip: {},
    memories: [],
  }

  // Loads the trip identified by the :tripId route parameter plus all of
  // its memories.
  componentDidMount() {
    const { tripId } = this.props.match.params;
    tripsData.getSingleTrip(tripId)
      .then((response) => this.setState({ trip: response.data }))
      .catch((err) => console.error('could not get single trip:', err));
    memoriesData.getMemoriesByTripId(tripId)
      .then((memsArray) => this.setState({ memories: memsArray }))
      .catch((err) => console.error('could not get memories for this trip:', err));
  }

  // Deletes the trip, then its memories, then navigates back to the list.
  // NOTE(review): the memories delete is fire-and-forget inside the .then —
  // its own failure would only surface via deleteMemoriesByTripId.
  deleteTripAndMemories = () => {
    const tripForDelete = this.props.match.params.tripId;
    tripsData.deleteTrip(tripForDelete)
      .then(() => {
        memoriesData.deleteMemoriesByTripId(tripForDelete);
        this.props.history.push('/trips');
      })
      .catch((err) => console.error('could not delete trip:', err));
  };

  // Opens the confirmation dialog; deletion runs only on "Yes".
  submit = () => {
    const { trip } = this.state;
    confirmAlert({
      title: 'Wait a second...',
      message: `You want to delete "${trip.name}" and all its memories?`,
      buttons: [
        {
          className: 'confirm-btn-yes',
          label: 'Yes, delete it',
          onClick: this.deleteTripAndMemories,
        },
        {
          className: 'confirm-btn-no',
          label: 'Never mind',
        },
      ],
    });
  };

  render() {
    const { trip, memories } = this.state;
    const { tripId } = this.props.match.params;
    const buildMemories = memories.map((mem) => (
      <MemoryCard key={mem.id} memory={mem}/>
    ));
    const newMemoryLink = '/memories/new';
    const editTripLink = `/trips/edit/${tripId}`;
    return (
      <div className="SingleTrip col-12">
        <h1>{trip.name}</h1>
        <h4 className="font-bubblegum">{trip.startDate} - {trip.endDate}</h4>
        <Link className="btn button-cyan mb10 mr10 low-pad font-marker" to={{ pathname: newMemoryLink, tripId }}><i className="fas fa-plus"></i> New Memory</Link>
        <div className="container row flex-wrap col-12">
          {buildMemories}
        </div>
        <div className="container button-container col-12">
          <Link className="btn button-acid mb10 mr10 font-marker" to={{ pathname: editTripLink, tripId }}><i className="far fa-edit"></i> Edit Trip Details</Link>
          <button className="btn button-purple mb10 font-marker" onClick={this.submit}><i className="far fa-trash-alt"></i> Delete This Trip</button>
        </div>
      </div>
    );
  }
}

export default SingleTrip;
<file_sep>import React from 'react';
import DatePicker from 'react-datepicker';
import moment from 'moment';
import authData from '../../../helpers/data/authData';
import memoriesData from '../../../helpers/data/memoriesData';
import './NewMemory.scss';
import 'react-datepicker/dist/react-datepicker.css';
class NewMemory extends React.Component {
state = {
memoryName: '',
memoryDate: '',
memoryImageUrl: '',
memoryLocation: '',
memoryCategoryId: '',
memoryNotes: '',
memoryTripId: '',
memoryIsFavorite: false,
}
nameChange = (e) => {
e.preventDefault();
this.setState({ memoryName: e.target.value });
}
dateChange = (date) => {
this.setState({ memoryDate: date });
}
imageChange = (e) => {
e.preventDefault();
this.setState({ memoryImageUrl: e.target.value });
}
locationChange = (e) => {
e.preventDefault();
this.setState({ memoryLocation: e.target.value });
}
categoryChange = () => {
const categoryButtons = document.getElementsByName('category');
for (let i = 0, { length } = categoryButtons; i < length; i += 1) {
if (categoryButtons[i].checked) {
const selectedCategory = categoryButtons[i].id;
this.setState({ memoryCategoryId: selectedCategory });
}
}
}
notesChange = (e) => {
e.preventDefault();
this.setState({ memoryNotes: e.target.value });
}
tripIdChange = (e) => {
e.preventDefault();
this.setState({ memoryTripId: e.target.value });
}
saveMemory= (e) => {
e.preventDefault();
const {
memoryName,
memoryDate,
memoryImageUrl,
memoryLocation,
memoryCategoryId,
memoryNotes,
} = this.state;
const newMemory = {
name: memoryName,
date: moment(memoryDate).format('MM/DD/YYYY'),
imageUrl: memoryImageUrl,
location: memoryLocation,
categoryId: memoryCategoryId,
notes: memoryNotes,
tripId: this.props.location.tripId,
uid: authData.getUid(),
};
memoriesData.postNewMemory(newMemory)
.then(() => this.props.history.push(`/trips/${this.props.location.tripId}`))
.catch((err) => console.error('could not post new memory:', err));
};
render() {
const {
memoryName,
memoryDate,
memoryImageUrl,
memoryLocation,
memoryNotes,
} = this.state;
const placeholderDate = moment(memoryDate).format('MM/DD/YYYY');
return (
<div className="NewMemory col-12">
<h1>New Memory</h1>
<form className="col-md-6 offset-md-3">
<div className="form-group">
<label className="label-custom font-midnight" htmlFor="memory-name">Memory Name</label>
<input type="text" placeholder="Rock climbing, Tiki Bar, etc..." className="form-control" id="memory-name" value={memoryName} onChange={this.nameChange} aria-describedby="memNameHelp"/>
</div>
<div className="form-check row" htmlFor="category">
<input className="form-check-input" type="radio" name="category" id="category1" onChange={this.categoryChange}/>
<label className="form-check-label font-bubblegum" htmlFor="FoodDrink">Food & Drink</label>
</div><div className="form-check row" htmlFor="category">
<input className="form-check-input" type="radio" name="category" id="category2" onChange={this.categoryChange}/>
<label className="form-check-label font-bubblegum" htmlFor="Adventure">Adventure</label>
</div><div className="form-check row" htmlFor="category">
<input className="form-check-input" type="radio" name="category" id="category3" onChange={this.categoryChange}/>
<label className="form-check-label font-bubblegum" htmlFor="Photo">Photo</label>
</div><div className="form-check row" htmlFor="category">
<input className="form-check-input" type="radio" name="category" id="category4" onChange={this.categoryChange}/>
<label className="form-check-label font-bubblegum" htmlFor="Note">Note</label>
</div>
<div className="form-group">
<label className="label-custom" htmlFor="memory-date">Date</label>
<br></br>
<DatePicker
className="picker"
placeholderText={placeholderDate}
onChange={this.dateChange}
dateFormat={'MM/dd/yyyy'}
/>
</div>
<div className="form-group">
<label className="label-custom" htmlFor="memory-imageUrl">Image Url</label>
<input type="text" placeholder="paste your image url here" className="form-control"
id="memory-imageUrl" value={memoryImageUrl} onChange={this.imageChange} aria-describedby="memImageHelp"/>
</div>
<div className="form-group">
<label className="label-custom" htmlFor="memory-location">Location</label>
<input type="text" placeholder="place, neighborhood, etc..." className="form-control"
id="memory-location" value={memoryLocation} onChange={this.locationChange} aria-describedby="memLocationHelp"/>
</div>
<div className="form-group">
<label className="label-custom" htmlFor="memory-notes">Notes</label>
<input type="text" placeholder="This was awesome because..." className="form-control" id="memory-notes" value={memoryNotes} onChange={this.notesChange} aria-describedby="memNotesHelp"/>
</div>
<button type="submit" className="btn button-acid" onClick={this.saveMemory}><i className="fas fa-check"></i> Save</button>
</form>
</div>
);
}
}
export default NewMemory;
<file_sep>import axios from 'axios';
import firebaseConfig from '../apiKeys.json';
const baseUrl = firebaseConfig.firebaseKeys.databaseURL;
const getMemoriesByTripId = (tripId) => new Promise((resolve, reject) => {
axios.get(`${baseUrl}/memories.json?orderBy="tripId"&equalTo="${tripId}"`)
.then((response) => {
const memories = response.data;
const memsArray = [];
if (memories) {
Object.keys(memories).forEach((memId) => {
memories[memId].id = memId;
memsArray.push(memories[memId]);
});
}
resolve(memsArray);
})
.catch((err) => reject(err));
});
const getSingleMemory = (memoryId) => axios.get(`${baseUrl}/memories/${memoryId}.json`);
const postNewMemory = (newMemory) => axios.post(`${baseUrl}/memories.json`, newMemory);
const deleteSingleMemory = (memoryId) => axios.delete(`${baseUrl}/memories/${memoryId}.json`);
const deleteMemoriesByTripId = (tripId) => new Promise((resolve, reject) => {
axios.get(`${baseUrl}/memories.json?orderBy="tripId"&equalTo="${tripId}"`)
.then((response) => {
const memories = response.data;
if (memories) {
Object.keys(memories).forEach((mem) => {
axios.delete(`${baseUrl}/memories/${mem}.json`);
});
}
resolve();
})
.catch((err) => reject(err));
});
const updateMemory = (memoryId, updatedMemObj) => axios.put(`${baseUrl}/memories/${memoryId}.json`, updatedMemObj);
export default {
getMemoriesByTripId,
getSingleMemory,
postNewMemory,
deleteMemoriesByTripId,
deleteSingleMemory,
updateMemory,
};
|
982acb22dd4790ae8a0811242d604405807c4e12
|
[
"JavaScript",
"Markdown"
] | 14 |
JavaScript
|
spookycutie08/re-visit
|
af890f6998331ddc36b8a99b45043b480f96c9e0
|
e077774db515d5e93527777e6065110810fc6b17
|
refs/heads/master
|
<file_sep># Simulink-DSP-Viewer
This is a basic Matlab Simulink Model and C template for practicing with DSP.
# How-to
Simply open the .slx file using Matlab Simulink and run everything from there.
The workflow goes as follows:
1- Add your DSP code into the C file;
2- Execute the Simulink simulation;
3- Analyse results on the simulation.
It's that simple.
Also, you can add your own blocks to the Simulink model, so you're not limited to the C program.
<file_sep>#ifndef _DSP_H_
#define _DSP_H_
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define STRSTR(x) #x
#define STR(x) STRSTR(x)
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
#define MAX_SIGNAL_COUNT 16
#define foreach(s, x, y) for(float * y = s.samples, x ## r = s.resolution, x = s.lower_bound / s.resolution; y <= s.samples; y++) for(int x ## n = 0; x ## n < s.sample_count; x ## n++, x += x ## r)
#define forrange(s, x, y, start, end) for(float * y = s.samples, x ## r = s.resolution, x = (start) / s.resolution; y <= s.samples; y++) for(int x ## n = (int)(start); x ## n < s.sample_count && x ## n < (end); x ## n++, x += x ## r)
#define var(s, y) float * y = s.samples;
typedef struct {
int sample_count;
float lower_bound;
float upper_bound;
float resolution;
float * samples;
} signal_t;
struct {
int signal_count;
float max_lower_bound;
float max_upper_bound;
float max_resolution;
signal_t * signals[MAX_SIGNAL_COUNT];
} signal_export_fmt = {
0, 0.0f, 0.0f, 0.0f
};
#define export(s) _export(&s)
static char _export(signal_t * s) {
if(signal_export_fmt.signal_count >= MAX_SIGNAL_COUNT)
return 1;
signal_export_fmt.signals[signal_export_fmt.signal_count] = s;
signal_export_fmt.signal_count++;
if(signal_export_fmt.max_lower_bound > s->lower_bound)
signal_export_fmt.max_lower_bound = s->lower_bound;
if(signal_export_fmt.max_upper_bound < s->upper_bound)
signal_export_fmt.max_upper_bound = s->upper_bound;
if(signal_export_fmt.max_resolution < s->resolution)
signal_export_fmt.max_resolution = s->resolution;
return 0;
}
void export_to_csv(char * csv_filename) {
FILE * f = fopen(csv_filename, "w");
char buff[(MAX_SIGNAL_COUNT * 128) + 3];
for(float i = signal_export_fmt.max_lower_bound, i2 = 0; i <= signal_export_fmt.max_upper_bound; i += signal_export_fmt.max_resolution, i2++) {
sprintf(buff, "%.10f", i);
for(int j = 0; j < signal_export_fmt.signal_count; j++) {
float sample;
char floatStr[128];
if(i2 < signal_export_fmt.signals[j]->sample_count)
sample = signal_export_fmt.signals[j]->samples[(int)i2];
else
sample = 0.0f;
sprintf(buff, "%s;%.4f", buff, sample);
}
fprintf(f, "%s\n", buff);
}
fclose(f);
}
void signalgroup_end() {
for(int i = 0; i < signal_export_fmt.signal_count; i++)
free(signal_export_fmt.signals[i]->samples);
signal_export_fmt.signal_count = 0;
signal_export_fmt.max_lower_bound = 0.0f;
signal_export_fmt.max_upper_bound = 0.0f;
}
signal_t signal(float lower_bound, float upper_bound, float resolution) {
signal_t ret;
ret.sample_count = ((abs(lower_bound) + abs(upper_bound)) + 1) / resolution;
ret.lower_bound = lower_bound;
ret.upper_bound = upper_bound;
ret.resolution = resolution;
ret.samples = (float*)malloc(sizeof(float) * ret.sample_count);
memset(ret.samples, 0, sizeof(float) * ret.sample_count);
return ret;
}
/* Constants/Transformations */
#define pi M_PI
#define rad(degrees) ((degrees) * M_PI / 180)
#define deg(radians) ((radians) * 180 / M_PI)
#define sign(y) (((y) > 0) ? 1 : 0)
#endif
<file_sep>#include "dsp.h"
void DFT(char * csv_file) {
signal_t s1 = signal(0, 360, 0.25);
signal_t s2_re = signal(0, 360, 0.25);
signal_t s2_im = signal(0, 360, 0.25);
signal_t s2 = signal(0, 360, 0.25);
/* Create sinewave */
foreach(s1, x, y)
y[xn] = sin(rad(2 * pi * x)) + sin(rad(2 * pi * 2 * x)) + sin(rad(2 * pi * 4 * x));
/* Perform DFT */
foreach(s2_re, k, y_re) {
var(s2_im, y_im);
var(s2, y_spectrum);
y_re[kn] = 0;
y_im[kn] = 0;
foreach(s1, x, y) {
y_re[kn] += y[xn] * cos((pi / s1.sample_count) * x * (kn - (180 / kr)) * 2) / s1.sample_count;
y_im[kn] += y[xn] * sin((pi / s1.sample_count) * x * (kn - (180 / kr)) * 2) / s1.sample_count;
}
y_spectrum[kn] = sqrt(pow(y_re[kn], 2) + pow(y_im[kn], 2));
}
export(s1);
export(s2);
export_to_csv(csv_file);
signalgroup_end();
}
int main(int argc, char ** argv) {
DFT(argv[1]);
return 0;
}
|
ef6b508d6102ffb9b5e50fd3688490c2c67d9fe0
|
[
"Markdown",
"C"
] | 3 |
Markdown
|
miguelangelo78/Simulink-DSP-Viewer
|
98f7cc3a15a432f0f2ef21cc9f7df4804862663a
|
3a12c6d699cdfb920ec3134515b9b9affb82d5e5
|
refs/heads/master
|
<file_sep>using System;
using Microsoft.AspNetCore.Mvc;
using VersionamentoHeaderEspecifico.Api.DTO;
namespace VersionamentoHeaderEspecifico.Api.Controllers
{
[ApiController]
[Route("api/[controller]")]
[ApiVersion("1.0")]
[ApiVersion("2.0")]
public class MotosController : ControllerBase
{
[HttpGet("{id}")]
[MapToApiVersion("1.0")]
[Obsolete]
public IActionResult GetV1([FromRoute] int id)
{
var carroResponse = new MotoDTO(id, "Twister", "V1");
return Ok(carroResponse);
}
[HttpGet("{id}")]
[MapToApiVersion("2.0")]
public IActionResult GetV2([FromRoute] int id)
{
var carroResponse = new MotoDTO(id, "CB 500", "V2");
return Ok(carroResponse);
}
}
}
<file_sep>namespace VersionamentoURL.Api.DTO
{
public class MotoDTO : VeiculoDTO
{
public MotoDTO(int id, string nome, string versao)
: base(id, nome, versao)
{
}
}
}
<file_sep>using System;
using System.IO;
using System.Linq;
using System.Reflection;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.ApiExplorer;
using Microsoft.AspNetCore.Mvc.Versioning;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Options;
using Swashbuckle.AspNetCore.SwaggerGen;
using Versionamento.Api.Infrastructure.Middlewares;
using Versionamento.Api.Swagger;
namespace Versionamento.Api
{
public class Startup
{
public Startup(IConfiguration configuration)
{
Configuration = configuration;
}
public IConfiguration Configuration { get; }
public void ConfigureServices(IServiceCollection services)
{
services.AddControllers();
services.AddApiVersioning(options =>
{
options.AssumeDefaultVersionWhenUnspecified = true;
options.DefaultApiVersion = ApiVersion.Default;
options.ApiVersionReader = ApiVersionReader.Combine(
new MediaTypeApiVersionReader("version"), //passa o version no header na Key accept. Ex: application/json;version=2.0
new HeaderApiVersionReader("api-version")); //aceita uma nova Key api-version no header
options.ReportApiVersions = true;
});
services.AddVersionedApiExplorer(options =>
{
// adiciona o API explorer versionado, que também adiciona o serviço IApiVersionDescriptionProvider
// observação: o código de formato especificado formatará a versão como "'v'major [.minor] [- status]"
options.GroupNameFormat = "'v'VVV";
// nota: esta opção só é necessária ao controlar a versão por segmento de url.
// o SubstitutionFormat também pode ser usado para controlar o formato da versão da API em modelos de rota
//options.SubstituteApiVersionInUrl = true;
});
services.AddTransient<IConfigureOptions<SwaggerGenOptions>, ConfigureSwaggerOptions>();
services.AddSwaggerGen(options =>
{
// adiciona um filtro de operação personalizado que define os valores padrão
options.OperationFilter<SwaggerDefaultValues>();
options.SchemaFilter<SwaggerExcludePropertySchemaFilter>();
options.ResolveConflictingActions(apiDescriptions => apiDescriptions.First());
var xmlFile = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
var xmlPath = Path.Combine(AppContext.BaseDirectory, xmlFile);
options.IncludeXmlComments(xmlPath);
});
//URL da API em letras minúsculas: Carros -> carros
services.Configure<RouteOptions>(options => { options.LowercaseUrls = true; });
}
public void Configure(IApplicationBuilder app, IWebHostEnvironment env, IApiVersionDescriptionProvider provider)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
app.UseSwagger(options => { options.RouteTemplate = "api-docs/{documentName}/docs.json"; });
app.UseSwaggerUI(options =>
{
options.RoutePrefix = "api-docs";
foreach (var description in provider.ApiVersionDescriptions)
options.SwaggerEndpoint($"/api-docs/{description.GroupName}/docs.json", description.GroupName.ToUpperInvariant());
});
}
app.UseApiExceptionHandling();
app.UseHttpsRedirection();
app.UseRouting();
app.UseAuthorization();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllers();
});
}
}
}
<file_sep>using Microsoft.AspNetCore.Mvc.ApiExplorer;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.OpenApi.Models;
using Swashbuckle.AspNetCore.SwaggerGen;
namespace VersionamentoHeaderEspecifico.Api.Swagger
{
/// <summary>
/// Configura as opções de geração do Swagger.
/// </summary>
/// <remarks>Isso permite que o controle de versão da API defina um documento Swagger por versão da API após
/// <see cref="IApiVersionDescriptionProvider"/> o serviço ser resolvido a partir do container de serviço.</remarks>
public class ConfigureSwaggerOptions : IConfigureOptions<SwaggerGenOptions>
{
private readonly IApiVersionDescriptionProvider _provider;
/// <summary>
/// Inicializa uma nova instância do <see cref="ConfigureSwaggerOptions"/> class.
/// </summary>
/// <param name="provider">The <see cref="IApiVersionDescriptionProvider">provider</see> usado para gerar documentos Swagger.</param>
public ConfigureSwaggerOptions(IApiVersionDescriptionProvider provider) => _provider = provider;
/// <inheritdoc />
public void Configure(SwaggerGenOptions options)
{
// adicione um documento swagger para cada versão de API descoberta
foreach (var description in _provider.ApiVersionDescriptions)
options.SwaggerDoc(description.GroupName, CreateInfoForApiVersion(description));
}
private static OpenApiInfo CreateInfoForApiVersion(ApiVersionDescription description)
{
var info = new OpenApiInfo()
{
Title = "Exemplo de Versionamento de Api",
Version = description.ApiVersion.ToString()
};
//if (description.IsDeprecated)
// info.Description += " Essa versão da Api está obsoleta.";
return info;
}
}
}
<file_sep>namespace VersionamentoHeaderEspecifico.Api.DTO
{
public class BicicletaDTO : VeiculoDTO
{
public BicicletaDTO(int id, string nome, string versao)
: base(id, nome, versao)
{
}
}
}<file_sep>using Microsoft.AspNetCore.Builder;
namespace Versionamento.Api.Infrastructure.Middlewares
{
public static class MiddlewareExtensions
{
public static IApplicationBuilder UseApiExceptionHandling(this IApplicationBuilder app)
=> app.UseMiddleware<ApiExceptionHandlingMiddleware>();
}
}
<file_sep>using Microsoft.AspNetCore.Mvc;
using Versionamento.Api.DTO;
namespace Versionamento.Api.Controllers.V1
{
[ApiController]
[Route("api/v{version:apiVersion}/[controller]")] //Versionamento através da rota
[ApiVersion("1.0")]
public class BicicletasController : ControllerBase
{
[HttpGet("{id}")]
public IActionResult Get([FromRoute] int id)
{
var carroResponse = new MotoDTO(id, "GTS", "V1");
return Ok(carroResponse);
}
}
}
<file_sep>using System;
namespace VersionamentoURL.Api.Swagger
{
[AttributeUsage(AttributeTargets.Property)]
public class SwaggerIgnorePropertyAttribute : Attribute
{
}
}
<file_sep># Versionamento API
_Projeto que serve de referência para o versionamento de API .net_.
É um projeto proposto para ser clonado
e reutilizado como base para o versionamento de API.
> _Objetivo: exemplificar o versionamento de API_.
## Iniciando
- `git clone https://github.com/hugo-cesar/versionamento-api.git`
- `cd versionamento-api`
## Pré-requisitos
- `dotnet --version`<br>
Você deverá ver a indicação da versão do dotnet instalada. Para o projeto executar é necessária a versão 5.
Instruções para instalação pode ser encontrada em: [dotnet-5](https://docs.microsoft.com/pt-br/dotnet/core/install/windows?tabs=net50)
## Executando a aplicação
Abra o projeto na sua IDE favorita, escolha uma das API's (Set as Startup Project) e a execute. Ao fazer isso as api's serão apresentadas no navegador via swagger,
caso isso não oconteça, basta abrir o navegador e colocar _https://localhost:5001/api-docs/index.html_. Agora basta escolhar a versão/endpoint que desejar e executar
para verificar o resultado do versionamento.
<file_sep>using Microsoft.AspNetCore.Mvc;
using Versionamento.Api.DTO;
namespace Versionamento.Api.Controllers.V2
{
[ApiController]
[Route("api/v{version:apiVersion}/[controller]")] //Versionamento através da rota
[ApiVersion("2.0")]
public class BicicletasController : ControllerBase
{
[HttpGet("{id}")]
public IActionResult GetV1([FromRoute] int id)
{
var carroResponse = new MotoDTO(id, "MTB", "V2");
return Ok(carroResponse);
}
}
}
<file_sep>using VersionamentoURL.Api.Swagger;
namespace VersionamentoURL.Api.DTO
{
public abstract class VeiculoDTO
{
protected VeiculoDTO(int id, string nome, string versao)
{
Id = id;
Nome = nome;
Versao = versao;
}
[SwaggerIgnoreProperty]
public int Id { get; set; }
public string Nome { get; set; }
public string Versao { get; set; }
}
}
<file_sep>namespace VersionamentoURL.Api.DTO
{
public class CarroDTO : VeiculoDTO
{
public CarroDTO(int id, string nome, string versao)
: base(id, nome, versao)
{
}
}
}
<file_sep>using Microsoft.AspNetCore.Mvc;
using Versionamento.Api.DTO;
namespace Versionamento.Api.Controllers
{
[ApiController]
[Route("api/[controller]")]
[ApiVersion("1.0", Deprecated = true)]
[ApiVersion("2.0")]
[ApiVersion("3.0")]
public class CarrosController : ControllerBase
{
/// <summary>
/// Esta API recupera um carro pelo id
/// </summary>
/// <response code="200">Retorna um carro</response>
/// <response code="400">Apenas para demonstração</response>
[HttpGet("{id}")]
public IActionResult GetV1([FromRoute] int id)
{
// throw new DomainException($"Carro id: {id} não foi encontrado.");
var carroResponse = new CarroDTO(id, "Cruze", "V1");
return Ok(carroResponse);
}
[HttpGet("{id}")]
[MapToApiVersion("2.0")]
public IActionResult GetV2([FromRoute] int id)
{
var carroResponse = new CarroDTO(id, "Cruze", "V2");
return Ok(carroResponse);
}
[HttpGet("{id}")]
[MapToApiVersion("3.0")]
public IActionResult GetV3([FromRoute] int id)
{
var carroResponse = new CarroDTO(id, "Cruze", "V3");
return Ok(carroResponse);
}
[HttpPost]
[MapToApiVersion("1.0")]
public IActionResult PostV1([FromBody] CarroDTO carroDto)
{
return Ok();
}
}
}
|
0dfc6f73b9a987e6e30466a6f5db8d37f3155e8a
|
[
"Markdown",
"C#"
] | 13 |
C#
|
hugo-cesar/versionamento-api
|
906d314820bc05bda969aadf5f27c58200c2a8bc
|
6eb3c24a69478de61311a45ca1813bd0e764ba99
|
refs/heads/master
|
<repo_name>DhanushM12/HospitalApiWithUnitTesting<file_sep>/controllers/patients_controller.js
const Doctor = require("../models/doctor");
const Patient = require("../models/patient");
const Report = require("../models/report");
const enums = require("../config/status");
//register patient
module.exports.register = async function (req, res) {
if (req.body.phone == undefined) {
return res.status(206).json({
message: "Insufficient data",
});
}
let phone = req.body.phone;
//Checking if patient is already registered
let patientExists = await Patient.findOne({ phone: phone });
if (patientExists) {
return res.status(405).json({
data: {
patient: patientExists,
},
message: "Patient already exists",
});
}
try {
//Registering a new patient
let createdPatient = await Patient.create(req.body);
if (createdPatient) {
return res.status(200).json({
data: {
patient: createdPatient,
},
message: "Successfully registered patient",
});
} else {
return res.status(500).json({
message: "Server error",
});
}
} catch (err) {
return res.status(500).json({
message: `${err}`,
});
}
};
//Create a new report
module.exports.createReport = async function (req, res) {
let patientId = req.params.id;
let doctorId = req.body.doctor;
if (patientId == undefined || doctorId == undefined) {
return res.status(206).json({
message: "Incomplete data provided",
});
}
//enums mapping has been done in config. Used to get the status from the number
let st = req.body.status;
req.body.status = enums[st];
try {
let patient = await Patient.findById(patientId);
let doctor = await Doctor.findById(doctorId);
//If the patient and doctor ids both exist only then report is created
if (patient && doctor) {
req.body.patient = patientId;
let report = await Report.create(req.body);
if (report) {
//pushing the new report in the patients reports array
await patient.reports.push(report);
await patient.save();
}
return res.status(200).json({
data: {
report: {
patient: patient.name,
status: report.status,
doctor: doctor.name,
date: report.createdAt,
},
},
message: "Report generated successfully",
});
} else {
return res.status(401).json({
message: "Patient/Doctor not registered",
});
}
} catch (err) {
return res.status(500).json({
message: err,
});
}
};
//Get all reports of a patient
module.exports.allReports = async function (req, res) {
let patientId = req.params.id;
try {
//populating the reports array in patient
let patient = await (await Patient.findById(patientId))
.populate({ path: "reports", populate: "doctor patient" })
.execPopulate();
if (patient) {
let reportsOfPatient = patient.reports;
// reportsOfPatient.sort((a, b)=>{b.status-a.status});
let reports = [];
reportsOfPatient.forEach((element) => {
let obj = {};
obj.patient = element.patient.name;
obj.doctor = element.doctor.name;
obj.status = element.status;
obj.date = element.createdAt;
reports.push(obj);
});
return res.status(200).json({
data: {
reports: reports,
},
message: "Reports retrieved successfully",
});
} else {
return res.status(404).json({
message: "Patient not found",
});
}
} catch (err) {
return res.status(500).json({
message: err,
});
}
};
//Get reports by status
module.exports.getReportsByStatus = async function (req, res) {
let prm = req.params.status;
let status = enums[prm];
if (status == undefined) {
return res.status(404).json({
message: "mapping to that status id has not been done",
});
}
try {
let reportsByStatus = await Report.find({ status: status }).populate(
"patient doctor"
);
if (reportsByStatus) {
let reports = [];
reportsByStatus.forEach((element) => {
let obj = {};
obj.patient = element.patient.name;
obj.doctor = element.doctor.name;
obj.status = element.status;
obj.date = element.createdAt;
reports.push(obj);
});
return res.status(200).json({
data: { reports },
message: "Reports retrieved successfully",
});
}
} catch (err) {
return res.status(500).json({
message: err,
});
}
};
<file_sep>/models/doctor.js
// same instance of mongoose
const mongoose = require("mongoose");
//doctor schema design
const doctorSchema = new mongoose.Schema(
{
email: {
type: String,
required: true,
unique: true,
},
password: {
type: String,
required: true,
},
name: {
type: String,
required: true,
},
},
{
timestamps: true,
}
);
//passing the doctorSchema instance to mongoose.model
const Doctor = mongoose.model("Doctor", doctorSchema);
//exporting the schema to be used further
module.exports = Doctor;
<file_sep>/controllers/doctors_controller.js
const Doctor = require("../models/doctor");
const jwt = require("jsonwebtoken");
const cryptoObj = require("../config/crypto");
//register doctor
module.exports.register = async function (req, res) {
//Check all fields present or not
if (
req.body.email == undefined ||
req.body.name == undefined ||
req.body.password == undefined
) {
return res.status(206).json({
message: "Insufficient data",
});
}
//if the doctor is already registered
let reqEmail = req.body.email;
let doctorExists = await Doctor.findOne({ email: reqEmail });
if (doctorExists) {
doctorExists = await doctorExists.toObject();
delete doctorExists.password;
return res.status(405).json({
data: {
doctor: doctorExists,
},
message: "Doctor is registered with email id provided",
});
}
//Encryption doctor's password
let password = <PASSWORD>;
let encrPass = cryptoObj.encrypt(password);
req.body.password = encrPass;
try {
let createdDoctor = await (await Doctor.create(req.body)).toObject();
delete createdDoctor.password;
if (createdDoctor) {
return res.status(200).json({
data: {
doctor: createdDoctor,
},
message: "Successfully Registered Doctor",
});
} else {
return res.status(500).json({
message: "Server error",
});
}
} catch (err) {
return res.status(500).json({
message: `${err}`,
});
}
};
//login using jwt
module.exports.login = async function (req, res) {
let id = req.body.id;
if (req.body.id == undefined || req.body.password == undefined) {
return res.status(206).json({
message: "Insufficient data",
});
}
try {
let doctor = await Doctor.findById(id);
if (doctor) {
let pass = <PASSWORD>.password;
let pwdFromDb = doctor.password;
pwdFromDb = cryptoObj.decrypt(pwdFromDb);
if (pass == pwdFromDb) {
return res.status(200).json({
data: {
token: jwt.sign(doctor.toJSON(), "hospital", {
expiresIn: 1000000,
}),
},
message: "Here is your token",
});
}
}
return res.status(401).json({
message: "Invalid Credentials",
});
} catch (err) {
console.log(err);
return res.status(500).json({
message: `${err}`,
});
}
};
<file_sep>/config/crypto.js
//A package used to encrypt and decrypt using known algos like AES
const CryptoJS = require("crypto-js");
const secretKey = "crpyt";
// Encryption
module.exports.encrypt = function (string) {
let ciphertext = CryptoJS.AES.encrypt(string, secretKey).toString();
return ciphertext;
};
//Decryption
module.exports.decrypt = function (ciphertext) {
let bytes = CryptoJS.AES.decrypt(ciphertext, secretKey);
let originalText = bytes.toString(CryptoJS.enc.Utf8);
return originalText.toString();
};
<file_sep>/routes/index.js
const express = require("express");
const router = express.Router();
const patientsController = require("../controllers/patients_controller");
const passport = require("passport");
router.use("/doctors", require("./doctors"));
router.use("/patients", require("./patients"));
router.get(
"/reports/:status",
passport.authenticate("jwt", { session: false }),
patientsController.getReportsByStatus
);
module.exports = router;
<file_sep>/config/status.js
const enums = {
0: "Negative",
1: "Travelled-Quarantine",
2: "Symptoms-Quarantine",
3: "Positive-Admit",
};
module.exports = enums;
<file_sep>/index.js
const express = require("express");
const app = express();
const port = 8000;
const db = require("./config/mongoose");
const passport = require("passport");
const passportJWT = require("./config/passport-jwt-strategy");
//to recognize the incoming Request Object as strings or arrays
app.use(express.urlencoded());
//to initialize Passport
app.use(passport.initialize());
//use express router
app.use("/", require("./routes"));
//app.get("/", (req, res) => res.send("<h1>Hello World!</h1>"));
//to bind and listen the connections on the specified host and port
app.listen(port, function (err) {
if (err) {
console.log(`Error in running the server: ${err}`);
}
console.log(`Server is running on port: ${port}`);
});
module.exports = app; //testing
<file_sep>/test/doctor.js
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;
const Doctor = require("../models/doctor");
const Patient = require("../models/patient");
const Report = require("../models/report");
const app = require("../index");
chai.use(chaiHttp);
// Mutable state shared across the ordered test cases below; later tests
// depend on ids/tokens captured by earlier ones, so order matters.
// NOTE(review): "<EMAIL>" / "<PASSWORD>" look like redaction placeholders
// (the bare `password: <PASSWORD>` is not valid JS) — restore real
// literals before running this suite.
let doctorsData = {};
let authToken = "";
var createdDoctorId = "";
let createdPatientId = "";
let baseUrl = "http://localhost:8000";
let doctorsUrl = "/doctors";
let patientsUrl = "/patients";
doctorsData["baseUrl"] = baseUrl;
doctorsData["doctorsUrl"] = doctorsUrl;
//Params for register doctor post request
let doctorName = "Doctor_Test";
let doctorEmail = "<EMAIL>";
let password = "<PASSWORD>";
//Params for register patient post request
let patientName = "Patient_Test";
let patientPhone = "0987654321";

// End-to-end flow: register doctor -> login -> register patient ->
// create report -> list reports. Cleanup of created rows is in after().
describe("Post - Doctor Calls", function () {
  // Registers a doctor and captures its generated _id for the login test.
  it("Post - doctors/register", function (done) {
    let call = "/register";
    chai
      .request(app)
      .post(doctorsUrl + call)
      .type("form")
      .set("content-type", "application/x-www-form-urlencoded")
      .send({
        name: doctorName,
        email: doctorEmail,
        password: <PASSWORD>,
      })
      .end((err, body, response) => {
        if (err) {
          console.log(err);
        }
        let createdDoctor = body.body.data.doctor;
        createdDoctorId = createdDoctor._id;
        expect(body.status).to.equal(200);
        expect(createdDoctor).to.have.property("name");
        expect(createdDoctor).to.have.property("email");
        expect(createdDoctorId.toString().length).to.greaterThan(0);
        done();
      });
  });

  // Logs in with the freshly created doctor id and captures the JWT used
  // by every subsequent authenticated request.
  it("Post - doctors/login", function (done) {
    let call = "/login";
    chai
      .request(app)
      .post(doctorsUrl + call)
      .type("form")
      .set("content-type", "application/x-www-form-urlencoded")
      .send({
        id: createdDoctorId,
        password: <PASSWORD>,
      })
      .end((err, body, response) => {
        if (err) {
          console.log(err);
        }
        expect(body.status).to.equal(200);
        expect(body.body.data).to.have.property("token");
        authToken = body.body.data.token;
        expect(authToken.length).to.greaterThan(0);
        done();
      });
  });

  // Registers a patient (JWT-protected) and captures its _id.
  it("Post - patients/register", function (done) {
    let call = "/register";
    chai
      .request(app)
      .post(patientsUrl + call)
      .type("form")
      .set("content-type", "application/x-www-form-urlencoded")
      .set("Authorization", `Bearer ${authToken}`)
      .send({
        name: patientName,
        phone: patientPhone,
      })
      .end((err, body, response) => {
        if (err) {
          console.log(err);
        }
        expect(body.status).to.equal(200);
        let createdPatient = body.body.data.patient;
        expect(createdPatient).to.have.property("_id");
        createdPatientId = createdPatient._id;
        expect(createdPatient).to.have.property("name");
        expect(createdPatient).to.have.property("phone");
        expect(createdPatient).to.have.property("createdAt");
        expect(createdPatientId.length).to.greaterThan(0);
        done();
      });
  });

  // Creates a report for the patient with a random status in [0, 3).
  it("Post - patients/:id/create_report", function (done) {
    let call = "/create_report";
    let url = patientsUrl + "/" + createdPatientId + call;
    let status = Math.floor(Math.random() * (3 - 0));
    chai
      .request(app)
      .post(url)
      .type("form")
      .set("content-type", "application/x-www-form-urlencoded")
      .set("Authorization", `Bearer ${authToken}`)
      .send({
        doctor: createdDoctorId,
        status: status,
      })
      .end((err, body, response) => {
        if (err) {
          console.log(err);
        }
        expect(body.status).to.equal(200);
        let report = body.body.data.report;
        status = report.status;
        expect(report).to.have.property("patient");
        expect(report).to.have.property("status");
        expect(report).to.have.property("doctor");
        expect(report).to.have.property("date");
        expect(status.length).to.above(0);
        done();
      });
  });

  // Lists the patient's reports; expects at least the one created above.
  it("Get - patients/:id/all_reports", function (done) {
    let call = "/all_reports";
    let url = patientsUrl + "/" + createdPatientId + call;
    chai
      .request(app)
      .get(url)
      .set("content-type", "application/x-www-form-urlencoded")
      .set("Authorization", `Bearer ${authToken}`)
      .end((err, body, response) => {
        if (err) {
          console.log(err);
        }
        expect(body.status).to.equal(200);
        let reports = body.body.data.reports;
        expect(reports).to.be.an("array");
        expect(reports[0]).to.have.property("patient");
        expect(reports[0]).to.have.property("status");
        expect(reports[0]).to.have.property("doctor");
        expect(reports[0]).to.have.property("date");
        expect(reports.length).to.greaterThan(0);
        done();
      });
  });

  // Cleanup: delete the report, doctor and patient created by this suite.
  after(async function () {
    let patient = await Patient.findById(createdPatientId);
    let reportId = patient.reports[0];
    let deletedReport = await Report.findByIdAndDelete(reportId);
    let deletedDoctor = await Doctor.findByIdAndDelete(createdDoctorId);
    patient.remove();
  });
});
|
50581389392c80146866e3331653295adee4b7ad
|
[
"JavaScript"
] | 8 |
JavaScript
|
DhanushM12/HospitalApiWithUnitTesting
|
542fec0bf84d1de5252609f32d1a8edf17c2d28d
|
c22b339737c5c13d60cc0ae2c65719f40941f3cd
|
refs/heads/master
|
<file_sep>import { combineReducers } from 'redux'
import { drinks, drinksHasErrored, drinksIsLoading } from './drinks';
export default combineReducers({
drinks,
drinksHasErrored,
drinksIsLoading
})
<file_sep>export default {
button: {
width: 100,
height: 30,
padding: 10,
backgroundColor: 'lightgray',
alignItems: 'center',
justifyContent: 'center',
margin: 3
},
}<file_sep>import React, {Component} from 'react';
import {View, Text, TouchableOpacity} from 'react-native';
import DrinksGrid from '../DrinksGrid';
import styles from './Display.styles'
// Presentational component: renders one of four mutually exclusive states
// (error / loading / initial fetch button / drinks grid) from the props
// injected by the App container.
const Display = (props) => {
  const { fetchData, isLoading, drinks, hasErrored } = props;
  // Error state takes priority over everything else.
  if (hasErrored) {
    return (
      <View style={{flex: 1, alignItems: 'center', justifyContent: 'center' }}>
        <Text>...you've had enough</Text>
      </View>
    )
  }
  // Fetch currently in flight.
  if (isLoading) {
    return (
      <View style={{flex: 1, alignItems: 'center', justifyContent: 'center' }}>
        <Text>Loading…</Text>
      </View>
    )
  }
  // Nothing loaded yet: show the button that kicks off the fetch.
  if (drinks.length === 0) {
    return (
      <View style={{flex: 1, alignItems: 'center', justifyContent: 'center' }}>
        <TouchableOpacity onPress={fetchData} style={styles.button}>
          <Text>Press Me!</Text>
        </TouchableOpacity>
      </View>
    )
  }
  // Data available: render the grid.
  return (
    <DrinksGrid drinks={drinks}/>
  )
}
export default Display<file_sep>export const START_GET_DRINKS = 'START_GET_DRINKS';
export const DRINKS_IS_LOADING = 'DRINKS_IS_LOADING';
export const DRINKS_FETCH_SUCCESS = 'DRINKS_FETCH_SUCCESS'
export const DRINKS_HAS_ERRORED = 'DRINKS_HAS_ERRORED'<file_sep>import React, {Component} from 'react';
import {View, Text, Image, ScrollView} from 'react-native';
import DrinkComponent from '../Drink'
import styles from './DrinksGrid.styles'
function DrinksGrid(props) {
const { drinks } = props
const drinkElements = drinks.map((drink, i) => {
return (
<DrinkComponent
drink={drink}
key={drink.idDrink}
index={i}
/>
)
})
return (
<ScrollView>
<View
style={styles.drinksContainer}>
{ drinkElements }
</View>
</ScrollView>
)
}
export default DrinksGrid<file_sep>import React, {Component} from 'react';
import Display from '../../components/Display';
import { connect } from 'react-redux';
import { drinksFetchData } from '../../reducers/drinks/actions.js';
// Container component: wires the Redux drinks slice and the fetch
// dispatcher into the presentational Display component.
class App extends Component {
  constructor(props) {
    super(props);
  }
  componentDidMount() {
    // Auto-fetch on mount is disabled; Display's button triggers it instead.
    // this.props.fetchData()
  }
  render() {
    return (
      <Display
        drinks={this.props.drinks}
        isLoading={this.props.isLoading}
        hasErrored={this.props.hasErrored}
        fetchData={this.props.fetchData}
      />
    );
  }
}

// Expose the three drinks-related store slices as props.
const mapStateToProps = (state) => {
  return {
    drinks: state.drinks,
    hasErrored: state.drinksHasErrored,
    isLoading: state.drinksIsLoading
  };
};

// fetchData is used as a press handler in Display, so the argument it
// receives at runtime is the press event rather than a URL; drinksFetchData
// ignores its first parameter and falls back to its default URL.
const mapDispatchToProps = (dispatch) => {
  return {
    fetchData: (url) => dispatch(drinksFetchData(url))
  };
};
export default connect(mapStateToProps, mapDispatchToProps)(App);<file_sep># React Native + Redux + Express drinks display
Installation:
```
git clone https://github.com/will-sklenars/react-native-redux-express.git
cd react-native-redux-express
```
`npm install`
or
`yarn`
`npm run debug:ios`
`cd api`
`npm install`
or
`yarn`
`node index.js`
<file_sep>import * as types from './actionTypes';
// Action creator: flags whether the drinks fetch failed.
export function drinksHasErrored(flag) {
  const action = { type: types.DRINKS_HAS_ERRORED };
  action.hasErrored = flag;
  return action;
}
// Action creator: toggles the global "drinks are loading" flag.
export function drinksIsLoading (flag) {
  const action = { type: types.DRINKS_IS_LOADING };
  action.isLoading = flag;
  return action;
}
// Action creator: carries the successfully fetched drink list.
export function drinksFetchSuccess (drinks) {
  const action = { type: types.DRINKS_FETCH_SUCCESS, drinks: drinks };
  return action;
}
// Thunk: loads the drink list from the API, dispatching loading / success /
// error actions around the fetch. The first argument is ignored (callers
// pass a press event through); @url may override the default endpoint.
export function drinksFetchData(e, url = "http://localhost:3000/drinks") {
  return (dispatch) => {
    dispatch(drinksIsLoading(true));

    fetch(url)
      .then((response) => {
        if (!response.ok) {
          throw Error(response.statusText);
        }
        dispatch(drinksIsLoading(false));
        return response;
      })
      .then((response) => response.json())
      .then((json) => dispatch(drinksFetchSuccess(json.drinks)))
      .catch(() => {
        // Bug fix: also clear the loading flag on failure; previously it
        // stayed true forever, leaving state inconsistent with the error.
        dispatch(drinksIsLoading(false));
        dispatch(drinksHasErrored(true));
      });
  };
}
<file_sep>export const drinksIsLoading = false
export const drinksHasErrored = false
export const drinks = []
<file_sep>var express = require('express')
var drinks = require('./drinks')
var app = express()

// GET /drinks — return the static drink list (required above) as JSON.
app.get('/drinks', function(req, res) {
    res.json(drinks)
})

// Serve on port 3000 (the default URL baked into the client's thunk).
app.listen(3000)
|
ee90656071c49c7b137982e0746d4638f7a667b8
|
[
"JavaScript",
"Markdown"
] | 10 |
JavaScript
|
will-sklenars/react-native-redux-express
|
3bf5a1001f8d573bfdf943d1245e90e6964af280
|
83e49b560a4acc5bffd2cabb70f5d085ca2c7a36
|
refs/heads/master
|
<file_sep>//var r =require('../sendNotificationModule.js');
var express = require('express'); // call express
var app = express(); // define our app using express
var bodyParser = require('body-parser');

// configure app to use bodyParser()
// this will let us get the data from a POST
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());

var port = process.env.PORT || 8080; // set our port

// ROUTES FOR OUR API
// =============================================================================
var router = express.Router(); // get an instance of the express Router

// GET /api/ — sends a hard-coded test push via FCM, then echoes a status.
router.get('/', function(req, res){
    var Notification = require('./modules/sendNotificationModule.js');
    // "to" is empty here — fill in a device/registration token before use.
    var message = {
        "to": "",
        "data": {
            "hello": "This is my First Message",
        }
    };
    Notification.sendNotification(message);
    // NOTE(review): 'Post is working '+res stringifies the response object
    // ("[object Object]") — likely meant to be a plain status string.
    res.json({message: 'Post is working '+res});
})

// REGISTER OUR ROUTES -------------------------------
// all of our routes will be prefixed with /api
app.use('/api', router);

// START THE SERVER
// =============================================================================
app.listen(port);
console.log('Magic happens on port ' + port);<file_sep>
// POST @data to the Firebase Cloud Messaging legacy send endpoint over
// HTTPS and log the parsed response (or the transport error).
var sendNotification = function(data) {
    // Firebase Cloud Messaging server key: found in Project Settings
    // under Cloud Messaging.
    var fcmServerKey = "";

    var https = require('https');

    var request = https.request({
        host: "fcm.googleapis.com",
        port: 443,
        path: "/fcm/send",
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            "Authorization": "key=" + fcmServerKey
        }
    }, function(response) {
        response.on('data', function(chunk) {
            console.log("Response:");
            console.log(JSON.parse(chunk));
        });
    });

    request.on('error', function(e) {
        console.log("ERROR:");
        console.log(e);
    });

    // Serialize the message payload into the request body and send it.
    request.write(JSON.stringify(data));
    request.end();
};

module.exports.sendNotification = sendNotification;
/*
var message = {
"to": "com.example.lenovo.know8",
"data": {
"hello": "This is a Firebase Cloud Messaging Device Group Message!",
}
};
sendNotification(message);
*/
|
4252df043398a6467cca46c4d5d1e5c79b3554ae
|
[
"JavaScript"
] | 2 |
JavaScript
|
ASHISH961996/FirebaseNotificationNodejs
|
d2e2ba4587c506e5d9cd7feaaee196b3a497cf9e
|
9625cf6304580d0e250844e5e3e787039961f779
|
refs/heads/master
|
<file_sep># guessTheNumber
A guess a Number Project.
<file_sep>// Steps to build the game
// Set a random number
// check the number to see if it is above, below, or the same as the number
// display score when checking the button
// reset the game with button
// document.getElementById('message').textContent = 'Correct Number!';
// document.getElementById('number').textContent = 13;
// document.getElementById('score').textContent = 10;
// document.getElementById('guess').value = 10;
// Game state: secret number in [1, 20], current score (read from the DOM's
// initial value), and the session-best high score.
let secreatNumber = Math.trunc(Math.random() * 20) + 1;
let score = Number(document.getElementById('score').textContent);
let highscore = 0;

// Write a status message into the #message element.
const displayMessage = function(message) {
    document.querySelector('#message').textContent = message;
}

// Start the game: evaluate the player's guess on each "check" click.
document.getElementById('checkNumber').addEventListener('click', function(){
    const guess = Number(document.querySelector('#guess').value);
    console.log(guess, typeof(guess));
    // There is no guess (empty input or 0 coerces to falsy).
    if(!guess) {
        displayMessage('No number!');
    // Player wins the game: reveal the number, restyle, update high score.
    } else if (guess === secreatNumber) {
        displayMessage("You guessed the number!");
        document.querySelector('.container').style.backgroundColor = '#60b347';
        document.querySelector('#number').style.fontSize = '12rem';
        document.querySelector('.mysteryNumberSection').style.width = '250px';
        document.getElementById('number').textContent = secreatNumber;
        if(score > highscore) {
            highscore = score;
            document.getElementById('highScore').textContent = highscore;
        }
    // When guess is wrong: hint high/low and decrement, or end the game.
    } else if(guess !== secreatNumber) {
        if (score > 1) {
            displayMessage(guess > secreatNumber ? "Guess is too High!" : "Guess is too low!");
            score = score - 1;
            document.getElementById('score').textContent = score;
        } else {
            displayMessage('You lost the game!');
            document.getElementById('score').textContent = 0;
        }
    }
    // (Superseded separate too-high / too-low branches removed; the single
    // guess !== secreatNumber branch above handles both directions.)
});

// Play again: reset state, pick a new secret number, restore initial styles.
document.getElementById('reset').addEventListener('click', function(){
    score = 20;
    secreatNumber = Math.trunc(Math.random() * 20) + 1;
    displayMessage('start guesing...');
    document.getElementById('score').textContent = score;
    document.getElementById('number').textContent = '?';
    document.querySelector('#guess').value = '';
    document.querySelector('.container').style.backgroundColor = 'black';
    document.querySelector('#number').style.fontSize = '6rem';
    document.querySelector('.mysteryNumberSection').style.width = '160px';
})
|
7036fc5132c8bd709b6dbb22a3d54fdca9120e4d
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
Etang131/guessTheNumber
|
66fb84d3be49b5862a96e30a5d07a4db724cc0e0
|
d3f5aaa35dc365ec62cfc826f0e10fc27e79689f
|
refs/heads/master
|
<repo_name>aiyuai1987/quill<file_sep>/formats/header.js
import Block from '../blots/block';
// Block-level blot for <h1>–<h6>. The format value is the 1-based heading
// level, derived from the tag's position in the tagName list.
class Header extends Block {
  static formats(domNode) {
    return this.tagName.indexOf(domNode.tagName) + 1;
  }

  optimize() {
    super.optimize();
    // Derive a slug id from the heading text so headings are linkable.
    const slug = this.domNode.textContent
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, '-')
      .replace(/^\-/, '')
      .replace(/\-$/, '');
    if (this.domNode.id !== slug) {
      if (slug.length === 0) {
        this.domNode.removeAttribute('id');
      } else {
        this.domNode.id = slug;
      }
    }
  }
}
Header.blotName = 'header';
Header.tagName = ['H1', 'H2', 'H3', 'H4', 'H5', 'H6'];

export default Header;
|
c784874931cdd8ca369a278195a9b16cb8e2fcda
|
[
"JavaScript"
] | 1 |
JavaScript
|
aiyuai1987/quill
|
3d948a7477954e37375fcd3d832f8ccaf2b0a4a0
|
e709fd7ac163e04db4c8156b0fc60a199d108449
|
refs/heads/master
|
<file_sep>#ifndef _RING_BUFFER_H_
#define _RING_BUFFER_H_
#include <pthread.h>
/* Thread-safe blocking ring (circular) buffer shared between one or more
 * reader and writer threads. All fields are guarded by mutex_io. */
typedef struct
{
    long size; // current size of data in ringbuffer
    long cap; // capacity of ringbuffer
    void *buf;                    // backing storage of cap bytes
    long roffset;                 // next read position within buf
    long woffset;                 // next write position within buf
    pthread_mutex_t mutex_io;     // guards every field in this struct
    pthread_cond_t cont_read;     // signalled when data becomes available
    pthread_cond_t cont_write;    // signalled when space becomes available
}RING_BUFFER_s;

/* Minimal C89-style boolean.
 * NOTE(review): this typedef clashes with <stdbool.h> if both ever end up
 * in the same translation unit — confirm no user includes stdbool.h. */
typedef enum{
    false=0,
    true=1,
}bool;

/* Allocate a ring buffer of @capacity bytes; NULL on failure. */
RING_BUFFER_s *ringbuffer_create(int capacity);
/* Destroy @rbuf and release its storage; safe to call with NULL. */
void ringbuffer_destroy(RING_BUFFER_s *rbuf);
/* Blocking read/write; @timeout is in milliseconds, 0 blocks forever. */
int ringbuffer_get(RING_BUFFER_s *rbuf, void *out_buf, int size, unsigned long timeout);
int ringbuffer_put(RING_BUFFER_s *rbuf, const void *in_buf, int size, unsigned int timeout);
/* Thread-safe state queries (full/empty report false for NULL @rbuf,
 * used/unused report -1). */
bool ringbuffer_full(RING_BUFFER_s *rbuf);
bool ringbuffer_empty(RING_BUFFER_s *rbuf);
long ringbuffer_used(RING_BUFFER_s *rbuf);
long ringbuffer_unused(RING_BUFFER_s *rbuf);
#endif
<file_sep>all:
gcc -o test -g test.c ringbuffer.c -lpthread
<file_sep>#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <sys/time.h>
#include "ringbuffer.h"
/* create a new ringbuffer
 * @capacity max buffer size of the ringbuffer
 * @return the address of the new ringbuffer, NULL for error.
 */
RING_BUFFER_s *ringbuffer_create(int capacity)
{
    RING_BUFFER_s *rbuf;
    int ret;

    rbuf = malloc(sizeof(RING_BUFFER_s));
    if (rbuf == NULL) {
        printf("malloc ringbuf error !\n");
        return NULL;
    }
    rbuf->cap = capacity;
    rbuf->buf = malloc(rbuf->cap);
    if (rbuf->buf == NULL) {
        printf("malloc error!\n");
        goto err0;
    }
    /* Buffer starts empty with both cursors at the beginning. */
    rbuf->size = 0;
    rbuf->roffset = 0;
    rbuf->woffset = 0;
    ret = pthread_mutex_init(&rbuf->mutex_io, NULL);
    if (ret) {
        printf("pthread_mutex_init error: %s\n", strerror(ret));
        goto err1;
    }
    ret = pthread_cond_init(&rbuf->cont_read, NULL);
    if (ret) {
        printf("pthread_cond_init cont_read error: %s\n", strerror(ret));
        goto err2;
    }
    ret = pthread_cond_init(&rbuf->cont_write, NULL);
    if (ret) {
        printf("pthread_cond_init cont_write error: %s\n", strerror(ret));
        goto err3;
    }
    return rbuf;

/* Error unwinding: release resources in the reverse order of creation. */
err3:
    pthread_cond_destroy(&rbuf->cont_read);
err2:
    pthread_mutex_destroy(&rbuf->mutex_io);
err1:
    free(rbuf->buf);
err0:
    free(rbuf);
    return NULL;
}
/* Release all synchronization objects and storage owned by @rbuf.
 * A NULL @rbuf is silently ignored. */
void ringbuffer_destroy(RING_BUFFER_s *rbuf)
{
    if (rbuf == NULL)
        return;

    pthread_cond_destroy(&rbuf->cont_write);
    pthread_cond_destroy(&rbuf->cont_read);
    pthread_mutex_destroy(&rbuf->mutex_io);
    free(rbuf->buf);
    free(rbuf);
}
/* get data from ringbuffer @rbuf
 * @rbuf     ringbuffer where to get data
 * @out_buf  output buffer where to store data
 * @size     size of @out_buf
 * @timeout  timeout in ms; 0 means block until data arrives
 * @return   number of bytes read; 0 for timeout; -1 for error
 */
int ringbuffer_get(RING_BUFFER_s *rbuf, void *out_buf, int size, unsigned long timeout)
{
    int ret;
    int nr;

    ret = pthread_mutex_lock(&rbuf->mutex_io);
    if (ret) {
        return -1;
    }

    /* Absolute deadline for pthread_cond_timedwait(). */
    struct timespec ts;
    clock_gettime(CLOCK_REALTIME, &ts);
    ts.tv_sec += timeout / 1000;
    ts.tv_nsec += (timeout % 1000) * 1000000L;
    /* Bug fix: normalize tv_nsec into [0, 1e9). An out-of-range tv_nsec
     * makes pthread_cond_timedwait() fail immediately with EINVAL, which
     * was reported as a spurious timeout. */
    if (ts.tv_nsec >= 1000000000L) {
        ts.tv_sec += 1;
        ts.tv_nsec -= 1000000000L;
    }

    /* Block until at least one byte is available (or the deadline hits). */
    while (rbuf->size == 0)
    {
        if (timeout) {
            if (pthread_cond_timedwait(&rbuf->cont_read, &rbuf->mutex_io, &ts)) {
                pthread_mutex_unlock(&rbuf->mutex_io);
                return 0;    /* timed out with no data */
            }
        } else {
            if (pthread_cond_wait(&rbuf->cont_read, &rbuf->mutex_io)) {
                pthread_mutex_unlock(&rbuf->mutex_io);
                return -1;
            }
        }
    }

    if (rbuf->woffset > rbuf->roffset) {
        /* Readable data is contiguous: [roffset, woffset). */
        int avail_count = rbuf->woffset - rbuf->roffset;
        // number to read
        nr = size > avail_count ? avail_count : size;
        // copy data
        memcpy(out_buf, rbuf->buf + rbuf->roffset, nr);
        // update read offset
        rbuf->roffset += nr;
        rbuf->size -= nr;
    } else {
        /* Readable data wraps: [roffset, cap) then [0, woffset). */
        int part1 = rbuf->cap - rbuf->roffset;
        int num_to_read = size > part1 ? part1 : size;
        memcpy(out_buf, rbuf->buf + rbuf->roffset, num_to_read);
        nr = num_to_read;
        // update read offset; wrap the cursor at capacity
        rbuf->size -= nr;
        rbuf->roffset += nr;
        if (rbuf->roffset == rbuf->cap) {
            rbuf->roffset = 0;
        }
        int remain = size - nr;
        if (remain > 0) {
            num_to_read = remain > rbuf->woffset ? rbuf->woffset : remain;
            memcpy(out_buf + nr, rbuf->buf, num_to_read); // part 2
            // update read offset
            rbuf->roffset = num_to_read;
            rbuf->size -= num_to_read;
            remain -= num_to_read;
        }
        nr = size - remain;
    }

    /* Space was freed: wake a blocked writer. */
    pthread_cond_signal(&rbuf->cont_write);
    pthread_mutex_unlock(&rbuf->mutex_io);
    return nr;
}
/* write data to ringbuffer @rbuf;
 * @rbuf     ringbuffer where to write data to;
 * @in_buf   input buffer;
 * @size     size of input buffer @in_buf
 * @timeout  timeout in ms; 0 means block until space is available;
 * @return   the number of bytes written to ringbuffer; 0 for timeout; -1 for error;
 */
int ringbuffer_put(RING_BUFFER_s *rbuf, const void *in_buf, int size, unsigned int timeout)
{
    int ret;
    int nw;

    ret = pthread_mutex_lock(&rbuf->mutex_io);
    if (ret) {
        return -1;
    }

    /* Absolute deadline for pthread_cond_timedwait(). */
    struct timespec ts;
    clock_gettime(CLOCK_REALTIME, &ts);
    ts.tv_sec += timeout / 1000;
    /* Bug fix: the remainder was converted with "* 1000" (microseconds),
     * inconsistent with ringbuffer_get(); milliseconds must be scaled by
     * 1e6 to become nanoseconds. Also normalize tv_nsec into [0, 1e9) so
     * pthread_cond_timedwait() does not fail with EINVAL. */
    ts.tv_nsec += (timeout % 1000) * 1000000L;
    if (ts.tv_nsec >= 1000000000L) {
        ts.tv_sec += 1;
        ts.tv_nsec -= 1000000000L;
    }

    while ( rbuf->cap - rbuf->size < size) // check have no enough space
    {
        if (timeout) {
            if (pthread_cond_timedwait(&rbuf->cont_write, &rbuf->mutex_io, &ts)) {
                pthread_mutex_unlock(&rbuf->mutex_io);
                return 0;    /* timed out without enough free space */
            }
        } else {
            if (pthread_cond_wait(&rbuf->cont_write, &rbuf->mutex_io)) {
                pthread_mutex_unlock(&rbuf->mutex_io);
                return -1;
            }
        }
    }

    if (rbuf->woffset < rbuf->roffset) {
        /* Free space is contiguous: [woffset, roffset). */
        int free_space = rbuf->roffset - rbuf->woffset;
        nw = size > free_space ? free_space : size;
        memcpy(rbuf->buf + rbuf->woffset, in_buf, nw);
        rbuf->woffset += nw;
        rbuf->size += nw;
    } else {
        /* Free space wraps: [woffset, cap) then [0, roffset). */
        int part1 = rbuf->cap - rbuf->woffset;
        int num_to_write = size > part1 ? part1 : size;
        // copy part 1
        memcpy(rbuf->buf + rbuf->woffset, in_buf, num_to_write);
        // update write offset; wrap the cursor at capacity
        nw = num_to_write;
        rbuf->size += nw;
        rbuf->woffset += nw;
        if (rbuf->woffset == rbuf->cap) {
            rbuf->woffset = 0;
        }
        int remain = size - nw;
        if (remain > 0) {
            // copy part2
            num_to_write = remain > rbuf->roffset ? rbuf->roffset : remain;
            memcpy(rbuf->buf, in_buf + nw, num_to_write);
            // update write offset
            rbuf->size += num_to_write;
            rbuf->woffset = num_to_write;
            nw += num_to_write;
        }
    }

    /* Data was added: wake a blocked reader. */
    pthread_cond_signal(&rbuf->cont_read);
    pthread_mutex_unlock(&rbuf->mutex_io);
    return nw;
}
/* @return true when the buffer holds exactly @cap bytes; false otherwise
 * (including when @rbuf is NULL). */
bool ringbuffer_full(RING_BUFFER_s *rbuf)
{
    bool full;

    if (rbuf == NULL)
        return false;

    pthread_mutex_lock(&rbuf->mutex_io);
    full = (rbuf->size == rbuf->cap);
    pthread_mutex_unlock(&rbuf->mutex_io);
    return full;
}
/* @return true when the buffer holds no data; false otherwise
 * (including when @rbuf is NULL). */
bool ringbuffer_empty(RING_BUFFER_s *rbuf)
{
    bool empty;

    if (rbuf == NULL)
        return false;

    pthread_mutex_lock(&rbuf->mutex_io);
    empty = (rbuf->size == 0);
    pthread_mutex_unlock(&rbuf->mutex_io);
    return empty;
}
/* @return the number of bytes currently stored, or -1 when @rbuf is NULL. */
long ringbuffer_used(RING_BUFFER_s *rbuf)
{
    long used;

    if (rbuf == NULL)
        return -1;

    pthread_mutex_lock(&rbuf->mutex_io);
    used = rbuf->size;
    pthread_mutex_unlock(&rbuf->mutex_io);
    return used;
}
/* @return the number of free bytes remaining, or -1 when @rbuf is NULL. */
long ringbuffer_unused(RING_BUFFER_s *rbuf)
{
    long unused;

    if (rbuf == NULL)
        return -1;

    pthread_mutex_lock(&rbuf->mutex_io);
    unused = rbuf->cap - rbuf->size;
    pthread_mutex_unlock(&rbuf->mutex_io);
    return unused;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include "ringbuffer.h"
/* Consumer thread: drains the ring buffer one byte per second and prints
 * the byte read plus the buffer's fill level. Never returns. */
void *handle(void *arg)
{
    RING_BUFFER_s *rb =(RING_BUFFER_s *)arg;
    char buf[10]={0};
    while(1){
        memset(buf,0,sizeof(buf));
        /* timeout 0 = block until at least one byte is available */
        ringbuffer_get(rb, buf, 1,0);
        printf("ringbuffer read =[%s]\n",buf);
        printf("ringbuffer used=%ld,unused=%ld\n",ringbuffer_used(rb),ringbuffer_unused(rb));
        sleep(1);
    }
}
/* Manual exercise of the ring buffer: fills a 100-byte buffer past its
 * capacity (forcing the writer to block until the consumer thread drains
 * it), printing fill levels along the way. Runs forever. */
int main()
{
    printf("Hello world\n");
    RING_BUFFER_s *rb = ringbuffer_create(100);
    /* Start the consumer that drains one byte per second. */
    pthread_t p;
    pthread_create(&p, NULL, handle, (void*)rb);
    printf("ringbuffer size =%ld emptey=%d\n",rb->cap, ringbuffer_empty(rb));
    printf("ringbuffer used=%ld,unused=%ld\n",ringbuffer_used(rb),ringbuffer_unused(rb));
    /* First 8 puts (80 bytes) fit without blocking. */
    ringbuffer_put(rb, "1234567890", 10,0);
    printf("ringbuffer used=%ld,unused=%ld\n",ringbuffer_used(rb),ringbuffer_unused(rb));
    ringbuffer_put(rb, "1234567890", 10,0);
    printf("ringbuffer used=%ld,unused=%ld\n",ringbuffer_used(rb),ringbuffer_unused(rb));
    ringbuffer_put(rb, "1234567890", 10,0);
    ringbuffer_put(rb, "1234567890", 10,0);
    ringbuffer_put(rb, "1234567890", 10,0);
    ringbuffer_put(rb, "1234567890", 10,0);
    ringbuffer_put(rb, "1234567890", 10,0);
    ringbuffer_put(rb, "1234567890", 10,0);
    /* Buffer is near/at capacity now: timed puts may return 0 (timeout),
     * untimed puts block until the consumer frees enough space. */
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,5000));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,5000));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,5000));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("put ret=%d\n",ringbuffer_put(rb, "1234567890", 10,0));
    printf("ringbuffer emptey=%d\n",ringbuffer_empty(rb));
    printf("ringbuffer used=%ld,unused=%ld\n",ringbuffer_used(rb),ringbuffer_unused(rb));
    /* Keep main alive so the consumer keeps draining. */
    while(1){
        sleep(1);
    }
    /*
    char buf[100]={0};
    ringbuffer_get(rb, buf, 100,0);
    printf("ringbuffer read 100=[%s]\n",buf);
    printf("ringbuffer emptey=%d\n",ringbuffer_empty(rb));
    printf("ringbuffer used=%d,unused=%d\n",ringbuffer_used(rb),ringbuffer_unused(rb));
    */
    ringbuffer_destroy(rb);
    return 0;
}
|
afe743d8b759076d022305ce3808d92176a68a72
|
[
"C",
"Makefile"
] | 4 |
C
|
ydb1358396458/ringbuffer
|
997c1d8ccf2292237143f89dfded58ce89abce0f
|
28605133a5067a4ca6a32ca915bba3b47d8cf42e
|
refs/heads/master
|
<repo_name>NaphNinja/Mood-Analyzer<file_sep>/README.md
# Mood-Analyzer
This application aims to perform Sentiment Analysis on pre-recorded audio files (.wav format) to obtain results based on emotive modulation of the voice.
# mood-class
<i>mood-class</i> is a classification tool that accepts inputs in the form of audio files and returns outputs in the form of text labels, corresponding to one in five emotion classes:
> anger
> disgust
> fear
> happiness
> sadness
Have a speech utterance you'd like to know the overall tone of? Read on to learn more about how to use <i>mood-class</i>!
## Getting Started
These next set of instructions will help you get a copy of <i>mood-class</i> and its corresponding requirements installed on your local machine.
### Prerequisites
#### Python
<i>mood-class</i> and its various dependencies are all built on top of Python. <i>mood-class</i> will run on Python versions 3 or higher. (This was tested on Python3.7)
#### Python Libraries
We'll be making use of several Python libraries to get the classifier to work. We can use pip to pre-install these.
```
pip install numpy matplotlib scipy sklearn hmmlearn simplejson eyed3 pydub
```
If you don't have pip installed, you can check out the documentation for how to do so [here](https://pip.pypa.io/en/stable/installing/). The above list of libraries are courtesy of [pyAudioAnalysis](https://github.com/tyiannak/pyAudioAnalysis), which is the framework we will use to perform the classification task.
### Installations and Set Up
#### Clone this repository
Navigate to your preferred directory location and clone this repository.
```
git clone https://github.com/NaphNinja/Mood-Analyzer.git
```
#### Clone pyAudioAnalysis
```
git clone https://github.com/tyiannak/pyAudioAnalysis
```
Make sure that you place this repository within your <i>mood-class</i> directory, specifically where you will choose to perform the classification task. For reference, my placement looks like this:
```
mood-class/
|_audioClassication/
|__pyAudioAnalysis/
```
#### Update path in `bashrc` file
In order to get pyAudioAnalysis to work, you would need to update the Python path in your `bashrc` file. You can access this hidden file via your terminal by any text editor of your choice. Here's how you might edit the file using emacs:
```
emacs .bashrc
```
Once you have the file open, set the Python path to where you saved pyAudioAnalysis locally.
```
export PYTHONPATH=$PYTHONPATH:"/Users/navrajnarula/Desktop/audioClassification/pyAudioAnalysis"
```
To update these specifications, specify the source in your terminal.
```
source ~/.bashrc
```
#### Download FFmpeg
FFmpeg is a free software that is designed for command-line-based processing of audio files. Since our classifier works with audio files, I would suggest downloading FFmpeg [here](https://www.ffmpeg.org/download.html) to avoid trivial errors when it comes to reading in inputs.
## Running <i>mood-class</i>
Now that everything has been set up on your local machine, we can go ahead and run the classifier!
### Data
You can use any audio data that you have available for this classifier. Since my focus is on emotional analysis, I utilized [CREMA-D](https://github.com/CheyneyComputerScience/CREMA-D). This dataset contains over 7,000 instances of speech utterances delivered by actors that fall into labeled categories of anger, disgust, fear, happiness, and sadness. To obtain the full dataset, head on over to CREMA-D's repository and download the data using [Git Large File Storage](https://git-lfs.github.com/).
```
mood-class/
|_audioClassification/
|__trainingdata/
|___anger/
|___disgust/
|___fear/
|___happiness/
|___sadness/
```
Note that pyAudioAnalysis will work on both `.mp3` and `.wav` files. My training files are in `.wav` format.
### Train Data
<i>mood-class</i> currently uses an [SVM](https://en.wikipedia.org/wiki/Support_vector_machine) model in this example, but if you prefer to use another one, check out [pyAudioAnalysis](https://github.com/tyiannak/pyAudioAnalysis) for varying models it may support. To train <i>mood-class</i> on your training data, simply run this command:
```
python createClassifierModel.py trainingData
```
`trainingData` is the directory I have stored my training files in.
Once the data is trained, you'll see a contingency table representing the output.
```
dis fear hap
dis 16.67 1.67 2.75
fear 5.08 11.08 5.67
hap 4.00 5.92 13.50
```
The above displays the matrix for disgust, fear, and happiness.
### Testing Data
<i>mood-class</i> will categorize any audio file into any class of your labeled dataset.
```
python testClassifierModel.py happiness_test.wav
```
Passing in an untrained happy file to <i>mood-class</i> returns the probability for which class the file would fall into:
```
classNames is ['disgust', 'fear', 'happiness']
P is [0.01042657 0.04208108 0.94749235]
result is 2.0
File: happiness_test.wav is in category: happiness, with probability: 0.94749235
```
## Maintenance
This project is still a work in progress. New contributors, pull requests, and issues are always welcome.
<file_sep>/Naph_Mood/Sentiment_Analysis/audioClassification/createClassifierModel.py
#!/usr/local/bin/python2
from pyAudioAnalysis import audioTrainTest as aT
import os
from sys import argv
import pydub

# Point pydub at a local ffmpeg build for audio decoding.
pydub.AudioSegment.converter = r"/home/naph/Downloads/ffmpeg/ffmpeg-2.8.15"

# Usage: createClassifierModel.py <training-data-dir>
# Each sub-directory of <training-data-dir> is one emotion class.
script, dirname = argv

# Robustness fix: keep only real sub-directories. os.listdir() also returns
# stray files (e.g. .DS_Store), which previously ended up being passed to
# the trainer as class folders (the old commented-out pop(0) hints at this).
subdirectories = [
    os.path.join(dirname, name)
    for name in sorted(os.listdir(dirname))
    if os.path.isdir(os.path.join(dirname, name))
]
print(subdirectories)

# Extract features and train an SVM, persisting the model as "svmModel".
aT.featureAndTrain(subdirectories, 1.0, 1.0, aT.shortTermWindow, aT.shortTermStep, "svm", "svmModel", False)
<file_sep>/Naph_Mood/Sentiment_Analysis/audioClassification/testClassifierModel.py
#!/usr/local/bin/python2
from sys import argv
import numpy as np
from pyAudioAnalysis import audioTrainTest as aT
import pydub

# Point pydub at a local ffmpeg build for audio decoding.
pydub.AudioSegment.converter = r"/home/naph/Downloads/ffmpeg/ffmpeg-2.8.15"

# Usage: testClassifierModel.py <audio-file>
script, filename = argv

isSignificant = 0.8 #try different values.

# Classify the file with the pre-trained SVM model ("svmModel").
# P: list of probabilities, one entry per class in classNames.
Result, P, classNames = aT.fileClassification(filename, "svmModel", "svm")
print("result is", Result)
print("classNames is", classNames)
print("P is", P)
print("result is", Result)

winner = np.argmax(P) #pick the result with the highest probability value.

# is the highest value found above the isSignificant threshhold?
# (threshold check below is intentionally disabled; the top class is
# always reported)
#if P[winner] > isSignificant :
print("File: " +filename + " is in category: " + classNames[winner] + ", with probability: " + str(P[winner]))
#else :
#print("Can't classify sound: " + str(P))
7718bc73eb62d5cd6d33284d1bc128aea9c253b3
|
[
"Markdown",
"Python"
] | 3 |
Markdown
|
NaphNinja/Mood-Analyzer
|
fe93df50098e207ce91005fe6f63f66a2d6f7999
|
475d030f3ce3c6ff8493d757b506122bbfebd6f5
|
refs/heads/master
|
<file_sep># When recurring on a number, ask 1) 0? and 2) else
# Always change at least one argument while recurring.
# It must be changed to be closer to termination.
# The changing argument must be tested in the termination condition.
# Wraps text so that no line exceeds a given column width, preferring to
# break at the last space before the limit.
class Wrapper
  # Returns +str+ with newlines inserted so every line is at most +max+
  # columns wide.
  def wrap str, max
    pieces = []
    line, rest = get_first_split(str, max)
    until rest == ""
      pieces << line
      line, rest = get_first_split(rest, max)
    end
    pieces << line
    pieces.join("\n")
  end

  # Splits +str+ into [first_line, remainder]; the remainder is "" when
  # the whole string already fits within +max+ columns.
  def get_first_split str, max
    return [str, ""] if str.length < max
    cut = get_split_index(str, max)
    [str.slice(0, cut).chomp, str.slice(cut, str.length).lstrip]
  end

  # Index at which to break: the last space found in the first +max+
  # characters, or +max+ itself when that leading chunk has no space.
  def get_split_index str, max
    return max if str[max] == ' '
    last_space = max
    str.each_char.with_index do |ch, i|
      last_space = i if ch == ' '
      return last_space if i + 1 == max
    end
    last_space
  end
end
<file_sep>require File.dirname(__FILE__) + '/../app/Wrapper.rb'
# Specs for Wrapper (word-wrap kata): exercises #wrap end-to-end and the
# #get_split_index helper directly. Written in legacy RSpec `should` syntax.
describe "Wrapper" do
w = Wrapper.new
describe "#wrap" do
# Input shorter than the limit passes through unchanged.
it "string less than max column, then returns string" do
w.wrap("foo", 80).should == "foo"
end
# With no spaces available, the split happens exactly at the limit.
it "string longer than max, then splits the string at the max length" do
w.wrap("foobar", 3).should == "foo\nbar"
end
it "string must be split more than once" do
w.wrap("onetwothree", 3).should == "one\ntwo\nthr\nee"
end
# Breaks prefer the last space before the limit when one exists.
it "string will be split on last space instead of at max length if appropriate" do
w.wrap("one two", 5).should == "one\ntwo"
w.wrap("one twothreefourfive six seven", 5).should ==
"one\ntwoth\nreefo\nurfiv\ne six\nseven"
w.wrap("12 456 890", 6).should == "12 456\n890"
end
end
describe "#get_split_index " do
# No space in the leading chunk: split index equals max.
it "returns max if there is no space" do
w.get_split_index("foo", 5).should == 5
end
it "returns index of last space when appropriate" do
w.get_split_index("foo bar",5).should == 3
w.get_split_index("foo bar baz", 8).should == 7
end
# A space sitting exactly at the limit is taken as the break point.
it "will return max if str[max] is a space" do
w.get_split_index("12 456 789",6).should == 6
end
end
end
# Smoke test confirming the RSpec runner itself is wired up.
describe "rspec start" do
it "this too shall pass" do
true.should == true
end
end
|
e60751d0f7f3c75dc47399156329028984d4d419
|
[
"Ruby"
] | 2 |
Ruby
|
markhaskamp/word-wrap-kata
|
7c7ca52a23d2675f8e1a0e0c80d428f243745dde
|
f47c3871749fd78503482f633504fe55997d901d
|
refs/heads/main
|
<file_sep># diar
Soft de control inmobiliario
|
7a90a43256f9860bedd0eaad03fb6c266412c7c1
|
[
"Markdown"
] | 1 |
Markdown
|
hipogea/diar
|
8aa07671aafc33286277043a1db8537cbb762bd8
|
978675bba5e78d56012330f14cdebb8232ba4e65
|
refs/heads/master
|
<repo_name>Epromee/chengyu-dictionary<file_sep>/data/chengyu_frequency.py
import json

# Load the raw idiom records (one comma-separated row per idiom).
idioms = list()
with open("chinese-idioms.txt", "r") as f:
    for line in f:
        parts = line.strip().split(",")
        idioms.append(parts)
print("All chinese idioms: ", len(idioms))

print("loading file...")
# BUG FIX: the corpus was read via open(...).read() without ever closing
# the handle; `with` guarantees it is closed. Also renamed the variable so
# it no longer shadows the `file` builtin.
with open('zh_cn.txt', 'r') as corpus_file:
    corpus = corpus_file.read()

data_list = list()
total_idioms = len(idioms)
for k, idiom in enumerate(idioms):
    # Occurrence count of the idiom text within the corpus.
    # NOTE(review): idiom[1] may still carry surrounding quotes from the
    # CSV row (the dict below strips them) — confirm against the input file.
    count = corpus.count(idiom[1])
    frequency = {
        "ID": idiom[0].strip('"'),
        "Abbr": idiom[6].strip('"'),
        "Chinese": idiom[1].strip('"'),
        "ChineseExplanation": idiom[3].strip('"'),
        "EnglishLiteral": "N/A",
        "EnglishFigurative": "N/A",
        "Pinyin": idiom[2].strip('"'),
        "Example": idiom[5].strip('"'),
        "ExampleTranslation": "N/A",
        "Origin": idiom[4].strip('"'),
        "OriginTranslation": "N/A",
        "Frequency": count
    }
    print("Idiom: ", idiom[1], k, "/", total_idioms, end="\r")
    data_list.append(frequency)
print("")

# Most frequent idioms first (lambda param renamed: it shadowed loop var k).
sorted_list = sorted(data_list, key=lambda rec: rec['Frequency'], reverse=True)
output_string = json.dumps(sorted_list, indent=4, ensure_ascii=False)
with open("data.json", "w+") as f:
f.write(output_string)<file_sep>/Project Proposal.md
### Project Proposal
We want to create a Chinese-English Chengyu (Four-Character Idiom) Dictionary that is useful both to native speakers and (middle-level to advanced) language learners, or anyone interested in Chengyu in general.
#### Motivation/Gap
We do have Chinese Chengyu dictionaries, although most of them are in printed versions. However, there is no well-formulated Chinese-English Chengyu dictionary either in print or online. We have found some online databases covering only Chengyu in Chinese, but no online dictionary in English or database with English translations for both literal and figurative meanings.
#### Surface-level Description
This Chinese-English Chengyu Dictionary allows you to search either by Chinese or English or Pinyin. All of them are with fuzzy-matching enabled.
Also, a user can search with provided tags, such as "numbers", "animals" and even forms like "一五一十" with the first and third characters being the same can be represented with an "AXAY" tag.
#### Examples of use cases
For example, you can search by "one stone two birds" ,then "一石二鸟" would come up, with its original Chinese lexeme "一石二鸟", its Pinyin “Yi Shi Er Niao”, its literal meaning "one stone two birds", its figurative meaning "doing one thing gets multiple benifits", and its tags, "number" and "animal". You can also search by its literal or figurative meaning. In addition to that, you can select tags, such as "number", then every 4-character idiom that contain at least one number character (e.g. "七上八下": 'seven up eight down') would come up. You can also search with form tags which I mentioned above. Let's say "AXAY", then all idioms with first and third character being the same come up.


#### Resources
Right now, there are several database covered in Chinese.
* Include entries from Xinhua Dictionary (but actually obtained via another dictionary website: http://www.zd9999.com): https://github.com/pwxcoo/chinese-xinhua
* a collection of Chinese idioms collected from the web: https://github.com/by-syk/chinese-idiom-db
References to English translations of figurative meaning:
https://www.chinese-tools.com/chinese/chengyu/dictionary
https://www.saporedicina.com/english/list-chengyu/
However, we need to translate them into English in both literal and figurative meanings. We plan to have 100-150 entries to worked on (with translations) but we can include much more entries in Chinese.
#### Feature List
Enhancing information:
Include tags/badges for each Chengyu, which give:
1. structural information (common forms are: AABB, ABAC, AABC, BCAA...), great to have for users who want to solve Chengyu puzzles
2. character information (tells if the Chengyu contain at least one character which represent animal, number, organs, people...)
3. semantic information (positive/negative/neutral; is story-based or not)

Entry Representation:
* English mode:
Lemma (a 4-character idiom) [pinyin]
Literal meaning (in English)
Figurative usage
(Origin/Example sentence[bilingual]/select for export)
* Chinese mode:
Lemma (a 4-character idiom) [pinyin]
Meaning and usage information in Chinese
(Origin/Example sentence/select for export)
* Bilingual mode:
Lemma (a 4-character idiom) [pinyin]
Literal meaning (in English, useless in Chinese)
Figurative usage in both Chinese and English
(Origin/Example sentence/select for export)
#### Technology
We want to use GWT as our major framework for the web application (like in the practice assignments). We would use mySQL to create a database to store our data. And we would use Bootstrap as our UI framework.
#### Nice-to-have features
1. origin:
It is better to have a origin for Chengyu because a large part of them are coming from stories. If we had time, we would like to present the origins in both Chinese and English. Most translations must be done manually. The origin of a Chengyu is crucial to the understanding of its meaning.
2. example sentence:
Another nice-to-have feature is example sentences, they are easy to find in Chinese and they provide the context of usage (which is probably already covered in explaining the figurative meaning of a Chengyu), but how to translate them into English poses a problem.
3. select and export:
Export the marked Chengyu to a .csv file, makes it easier for learners to select the entries they want and add to flashcard sets, e.g. to Quizlet
<file_sep>/data/data_structure.sql
-- Schema for the Chengyu (Chinese idiom) dictionary.
CREATE SCHEMA `colewe` DEFAULT CHARACTER SET utf8 ;

-- One row per idiom: Chinese/English explanations plus corpus frequency.
CREATE TABLE `colewe`.`Chengyu` (
  `ID` INT NOT NULL,
  `Abbr` VARCHAR(45) NULL,
  `Chinese` VARCHAR(45) NULL,
  `ChineseExplanation` VARCHAR(45) NULL,
  `EnglishLiteral` VARCHAR(45) NULL,
  `EnglishFigurative` VARCHAR(45) NULL,
  `Pinyin` VARCHAR(45) NULL,
  `Example` VARCHAR(255) NULL,
  `ExampleTranslation` VARCHAR(255) NULL,
  `Origin` VARCHAR(255) NULL,
  `OriginTranslation` VARCHAR(255) NULL,
  `Frequency` INT NULL,
  PRIMARY KEY (`ID`));

-- Tag vocabulary (structural / semantic badges).
CREATE TABLE `colewe`.`Tags` (
  `ID` INT NOT NULL,
  `Tag` VARCHAR(45) NULL,
  PRIMARY KEY (`ID`));

-- Many-to-many join between idioms and tags.
-- BUG FIX: the foreign keys previously referenced schema `colowe` (typo),
-- which does not exist; they now reference the `colewe` schema created above.
CREATE TABLE `colewe`.`ChengyuTag` (
  `ChengyuID` INT NOT NULL,
  `TagID` INT NOT NULL,
  PRIMARY KEY (`ChengyuID`, `TagID`),
  INDEX `fk_ChengyuTag_2_idx` (`TagID` ASC),
  CONSTRAINT `fk_ChengyuTag_1`
    FOREIGN KEY (`ChengyuID`)
    REFERENCES `colewe`.`Chengyu` (`ID`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION,
  CONSTRAINT `fk_ChengyuTag_2`
    FOREIGN KEY (`TagID`)
    REFERENCES `colewe`.`Tags` (`ID`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION);
<file_sep>/chengyu.dict/src/main/java/com/colewe/ws1819/server/DictionaryServiceImpl.java
/**
*
*/
package com.colewe.ws1819.server;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import javax.servlet.ServletException;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import com.colewe.ws1819.client.DictionaryService;
import com.colewe.ws1819.shared.Entry;
/**
* this service works with dictionary function
*
*
* @author jingwen and xuefeng
*
*/
public class DictionaryServiceImpl extends RemoteServiceServlet implements DictionaryService {

    // JDBC driver name and database URL
    static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver";
    static final String DB_URL = "jdbc:mysql://localhost/colewe?useUnicode=yes&characterEncoding=UTF-8";

    // Database credentials
    static final String USER = "root";
    static final String PASS = "<PASSWORD>";

    @Override
    public void init() throws ServletException {
        try {
            // initialize data base driver
            Class.forName("com.mysql.cj.jdbc.Driver");
        } catch (ClassNotFoundException e) {
            throw new ServletException(e);
        }
    }

    /**
     * Builds the LIKE-based search statement for the requested mode.
     * mode 1 = Chinese, 2 = Pinyin, 3 = English literal/figurative;
     * any other value falls back to Chinese (same behavior as before).
     */
    private PreparedStatement buildStatement(Connection conn, String target, int mode) throws SQLException {
        final String base = "select * from Chengyu as c "
                + "left join ChengyuTag as ct "
                + "on ct.ChengyuID = c.ID "
                + "left join Tags as t "
                + "on t.ID = ct.TagID ";
        final String pattern = "%" + target + "%";
        PreparedStatement stmt;
        switch (mode) {
        case 2:
            stmt = conn.prepareStatement(base + "where Pinyin like ?");
            stmt.setString(1, pattern);
            break;
        case 3:
            stmt = conn.prepareStatement(base + "where EnglishLiteral like ? or EnglishFigurative like ?");
            stmt.setString(1, pattern);
            stmt.setString(2, pattern);
            break;
        case 1:
        default:
            stmt = conn.prepareStatement(base + "where Chinese like ?");
            stmt.setString(1, pattern);
            break;
        }
        return stmt;
    }

    /**
     * Runs the query and folds the joined rows (one row per idiom/tag pair)
     * into unique Entry objects carrying all of their tags.
     *
     * BUG FIX: the previous version leaked the Connection, PreparedStatement
     * and ResultSet on every call; try-with-resources now closes them. The
     * statement-building and row-merging code was also duplicated verbatim
     * in search() and tagSearch() and is now shared.
     */
    private ArrayList<Entry> doSearch(String target, int mode) {
        ArrayList<Entry> result = new ArrayList<Entry>();
        try (Connection conn = DriverManager.getConnection(DB_URL, USER, PASS);
                PreparedStatement stmt = buildStatement(conn, target, mode);
                ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                String tag = rs.getString("Tag");
                Entry entry = new Entry(rs.getString("ID"), rs.getString("Abbr"), rs.getString("Chinese"),
                        rs.getString("ChineseExplanation"), rs.getString("EnglishLiteral"), rs.getString("EnglishFigurative"),
                        rs.getString("Pinyin"), rs.getString("Example"), rs.getString("ExampleTranslation"), rs.getString("Origin"),
                        rs.getString("OriginTranslation"), rs.getString("Frequency"));
                // Merge: if an Entry with this ID is already collected, just
                // attach the tag from this row instead of adding a duplicate.
                boolean found = false;
                for (int i = 0; i < result.size(); i++) {
                    Entry eachEntry = result.get(i);
                    if (eachEntry.getId().equals(rs.getString("ID"))) {
                        if (tag != null) {
                            eachEntry.addTag(tag);
                        }
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    if (tag != null) {
                        entry.addTag(tag);
                    }
                    result.add(entry);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Free-text search over the dictionary.
     *
     * @param target substring to look for
     * @param mode   searched field: 1 Chinese, 2 Pinyin, 3 English
     * @return matching entries with their tags attached
     * @author Xuefeng and Jingwen
     */
    @Override
    public ArrayList<Entry> search(String target, int mode) {
        return doSearch(target, mode);
    }

    /**
     * Same as {@link #search(String, int)}, but keeps only entries that
     * carry every tag in {@code tags}.
     *
     * @author Xuefeng and Jingwen
     */
    @Override
    public ArrayList<Entry> tagSearch(String target, int mode, ArrayList<String> tags) {
        ArrayList<Entry> tagResults = new ArrayList<Entry>();
        for (Entry entry : doSearch(target, mode)) {
            if (entry.hasTags(tags)) {
                tagResults.add(entry);
            }
        }
        return tagResults;
    }
}
<file_sep>/chengyu.dict/src/main/java/com/colewe/ws1819/shared/Entry.java
package com.colewe.ws1819.shared;
import java.io.Serializable;
import java.util.ArrayList;
public class Entry implements Serializable{
String id;
String abbr;
String chinese;
String chineseExplanation;
String englishLiteral;
String englishFigurative;
String pinyin;
String example;
String exampleTranslation;
String origin;
String originTranslation;
String frequency;
ArrayList<String> tags;
public Entry() {
this.id = null;
this.abbr = null;
this.chinese = null;
this.chineseExplanation = null;
this.englishLiteral = null;
this.englishFigurative = null;
this.pinyin = null;
this.example = null;
this.exampleTranslation = null;
this.origin = null;
this.originTranslation = null;
this.frequency = null;
this.tags = new ArrayList<String>();
}
public Entry(String id, String abbr, String chinese, String chineseExplanation, String englishLiteral,
String englishFigurative, String pinyin, String example, String exampleTranslation,
String origin, String originTranslation, String frequency) {
this.id = id;
this.abbr = abbr;
this.chinese = chinese;
this.chineseExplanation = chineseExplanation;
this.englishLiteral = englishLiteral;
this.englishFigurative = englishFigurative;
this.pinyin = pinyin;
this.example = example;
this.exampleTranslation = exampleTranslation;
this.origin = origin;
this.originTranslation = originTranslation;
this.frequency = frequency;
this.tags = new ArrayList<String>();
}
public void addTag(String tag) {
if (this.tags.contains(tag)) {
return;
}
this.tags.add(tag);
}
public String getId() {
return this.id;
}
public String getAbbr() {
return this.abbr;
}
public String getChinese() {
return this.chinese;
}
public String getChineseExplanation() {
return this.chineseExplanation;
}
public String getEnglishLiteral() {
return this.englishLiteral;
}
public String getEnglishFigurative() {
return this.englishFigurative;
}
public String getPinyin() {
return this.pinyin;
}
public String getExample() {
return this.example;
}
public String getExampleTranslation() {
return this.exampleTranslation;
}
public String getOrigin() {
return this.origin;
}
public String getOrignTranslation() {
return this.originTranslation;
}
public String getFrequency() {
return this.frequency;
}
public ArrayList<String> getTags() {
return this.tags;
}
public boolean hasTags() {
if(this.tags.size() == 0) {
return false;
}
return true;
}
public String toString() {
String output = "";
output += this.id + "\t";
output += this.abbr + "\t";
output += this.chinese + "\t";
output += this.tags.size() + "\t";
for(String tag: this.tags) {
output += tag + "\t";
}
return output;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(this.getClass() == obj.getClass())) {
return false;
}
final Entry other = (Entry)obj;
if (this.id != other.id) {
return false;
}
return true;
}
@Override
public int hashCode() {
return this.id.hashCode();
}
public boolean hasTags(ArrayList<String> tags) {
for(String tag: tags) {
if(!this.tags.contains(tag)) {
return false;
}
}
return true;
}
}
<file_sep>/chengyu.dict/src/main/webapp/WEB-INF/setup.sql
-- Deployment schema: unqualified table names, run inside the target database.
-- NOTE(review): unlike the Workbench script (data_structure), Chengyu here
-- has no ChineseExplanation column — confirm which definition the server
-- code actually expects before deploying.
CREATE TABLE Chengyu (
ID INT NOT NULL,
Abbr VARCHAR(10),
Chinese VARCHAR(10),
EnglishLiteral VARCHAR(100),
EnglishFigurative VARCHAR(100),
Pinyin VARCHAR(40),
Example VARCHAR(255),
ExampleTranslation VARCHAR(1000),
Origin TEXT,
OriginTranslation TEXT,
Frequency SMALLINT,
CONSTRAINT PK_Chengyu PRIMARY KEY (ID)
);
-- Tag vocabulary.
CREATE TABLE Tags (
ID INT NOT NULL,
Tag VARCHAR(10),
CONSTRAINT PK_Tag PRIMARY KEY (ID)
);
-- Many-to-many join between idioms and tags.
CREATE TABLE ChengyuTag (
ChengyuID INT NOT NULL,
TagID INT NOT NULL,
CONSTRAINT PK_Chengyu_Tag PRIMARY KEY (ChengyuID, TagID),
CONSTRAINT FK_Chengyu FOREIGN KEY (ChengyuID) REFERENCES Chengyu(ID),
CONSTRAINT FK_Tag FOREIGN KEY (TagID) REFERENCES Tags(ID)
);
<file_sep>/chengyu.dict/src/main/java/com/colewe/ws1819/client/semesterproject.java
package com.colewe.ws1819.client;
import java.util.ArrayList;
import com.colewe.ws1819.shared.Entry;
import com.colewe.ws1819.shared.FieldVerifier;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyUpEvent;
import com.google.gwt.event.dom.client.KeyUpHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.DialogBox;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.colewe.ws1819.client.DictionaryService;
import com.colewe.ws1819.client.DictionaryServiceAsync;
/**
* Entry point classes define <code>onModuleLoad()</code>.
*/
/**
 * GWT entry point: builds the dictionary UI panel and wires the search and
 * download buttons to the RPC service / export URL.
 */
public class semesterproject implements EntryPoint {
    /**
     * The message displayed to the user when the server cannot be reached or
     * returns an error.
     */
    private static final String SERVER_ERROR = "An error occurred while "
            + "attempting to contact the server. Please check your network "
            + "connection and try again.";

    // Async RPC proxy for the dictionary search service.
    private final DictionaryServiceAsync dictionarySvc = GWT.create(DictionaryService.class);

    /**
     * This is the entry point method.
     * Creates the UI panel, attaches it to the page, and registers the two
     * click handlers.
     */
    public void onModuleLoad() {
        UIPanel uiPanel = new UIPanel();
        RootPanel.get().add(uiPanel);

        // Search: validate the query text, then run an async tag-filtered
        // search and render the results back into the panel.
        ClickHandler searchTag = new ClickHandler() {
            public void onClick(ClickEvent e) {
                String target = uiPanel.inputTextBox.getText();
                if (!FieldVerifier.isValidInput(target)) {
                    Window.alert("Please enter the string you want to search");
                    return;
                }
                int mode = uiPanel.mode;
                AsyncCallback<ArrayList<Entry>> callback = new AsyncCallback<ArrayList<Entry>>() {
                    @Override
                    public void onFailure(
                            Throwable caught) {
                        Window.alert("Failure");
                    }

                    @Override
                    public void onSuccess(
                            ArrayList<Entry> results) {
                        uiPanel.updateResult(results);
                    }
                };
                ArrayList<String> tags = uiPanel.getFilter();
                dictionarySvc.tagSearch(target, mode, tags, callback);
            }
        };

        // Download: validate the query, then navigate to the export URL
        // (presumably served by a download servlet mapped at
        // "semesterproject/download" — confirm the web.xml mapping).
        ClickHandler download = new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                // TODO Auto-generated method stub
                String target = uiPanel.inputTextBox.getText();
                if (!FieldVerifier.isValidInput(target)) {
                    Window.alert("Please enter the string you want to search");
                    return;
                }
                int mode = uiPanel.mode;
                String newUrl = "semesterproject/download?target="
                        + target + "&mode=" + String.valueOf(mode);
                Window.Location.replace(newUrl);
            }
        };

        uiPanel.searchButton.addClickHandler(searchTag);
        uiPanel.downloadButton.addClickHandler(download);
    }
}
<file_sep>/chengyu.dict/src/main/java/com/colewe/ws1819/client/DictionaryServiceAsync.java
/**
*
*/
package com.colewe.ws1819.client;
import java.util.ArrayList;
import com.colewe.ws1819.shared.Entry;
import com.google.gwt.user.client.rpc.AsyncCallback;
/**
* @author jingwen and xuefeng
*
*/
/**
 * Asynchronous counterpart of the DictionaryService RPC interface, as
 * required by GWT's client-side RPC mechanism.
 */
public interface DictionaryServiceAsync{
// Legacy signature kept for reference:
// void search(AsyncCallback<ArrayList<String[]>> callback);
/** Free-text search; mode selects the field (1 Chinese, 2 Pinyin, 3 English). */
void search(String target, int mode, AsyncCallback<ArrayList<Entry>> callback);
/** Same as search, but only entries carrying all given tags are returned. */
void tagSearch(String target, int mode, ArrayList<String> tags, AsyncCallback<ArrayList<Entry>> callback);
}
<file_sep>/README.md
# How to install
To install our application, please follow the listed instructions:
1. First, you need an environment where GWT can be deployed. At this moment, we are using Eclipse plus a GWT plugin where we retrieved it from Eclipse marketplace.
2. Then, MySQL is needed in order to import our data.
* MySQL Workbench is optional, but it helps compared to the MySQL command-line tools.
3. To import our data, you need to run `data\_structure.sql` file where our data structures and relations were stored.
4. Then, you should first import Chengyu data from `chengyu\_data.json` file and tags data from `tags.csv` file, since there are foreign key dependencies.
5. Then, you can import chengyu tags relations from `chengyu\_tag.csv` file.
6. Following that, you need to go to `DictionaryServiceImpl.java` (located in `chengyu.dict\src\main\java\com\colewe\ws1819\server\`) where the database connection information is hard-coded. Variables `DB\_URL`, `USER` and `PASS` need to be modified in order to make our program connect to the database.
7. Please run the application through Eclipse.
8. Open the page `http://localhost:8888/chengyudict` via a browser. We have tested and passed with Safari, Google Chrome and Firefox.
9. Enjoy it!
|
558888d099435774f27d80f02597897e2bd7b577
|
[
"Markdown",
"SQL",
"Python",
"Java"
] | 9 |
Python
|
Epromee/chengyu-dictionary
|
12b6b19f7280754562b120443aea8c1d2d6600fa
|
4bbcc1892401162d2b7728b44077ba4538f4827c
|
refs/heads/master
|
<repo_name>jeanklann/GravityTestsSimulation<file_sep>/SpaceSimulation/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Drawing;
using System.IO;
using System.Drawing;
namespace SpaceSimulation {
/// <summary>
/// Renders a simple 2-D gravity simulation: draws the planet, then traces a
/// ship's orbit by Euler integration and saves the result as a PNG.
/// </summary>
public class Program {
    // Output image dimensions in pixels.
    public static Size ImageSize = new Size(1000, 1000);
    // World units represented by one pixel (1E5; 100 km/px assuming metres —
    // consistent with EarthRadius = 6371E3 below, but confirm units).
    public const double Proportion = 1E5;

    public static void Main(string[] args) {
        /*
        for(double x = 0; x < 100; x+=1.45649852) {
        Console.WriteLine(
        SimplexNoise.noise(x, 0.76435434, 0.857438, 0.6432758)*0.5+0.5
        );
        }*/
        Bitmap bitmap = new Bitmap(ImageSize.Width, ImageSize.Height);
        CorpoCeleste Terra = new CorpoCeleste(CorpoCeleste.EarthMass, CorpoCeleste.EarthRadius);
        DrawPlanet(Terra, bitmap);
        // geosynchronous orbit (alternative initial conditions, kept for reference)
        /*
        Vector3 NavePosition = new Vector3(0, Terra.Radius + 35786E3, 0);
        Vector3 VelocidadeNave = new Vector3(3074.6, 0, 0);
        */
        // Ship starts 2000 km above the surface with a tangential velocity.
        Vector3 NavePosition = new Vector3(0, Terra.Radius + 2000E3, 0);
        Vector3 VelocidadeNave = new Vector3(10000, 0, 0);
        Console.WriteLine(VelocidadeNave);
        // Euler integration: velocity is added straight to position each
        // iteration, i.e. an implicit timestep of 1 — TODO confirm units.
        for(int i = 0; i < 100000; i++) {
            // Plot the ship's position when it falls inside the image bounds.
            Point NavePositionPoint = Vector3ToImagePoint(NavePosition);
            if(NavePositionPoint.X >= 0 && NavePositionPoint.X < ImageSize.Width &&
            NavePositionPoint.Y >= 0 && NavePositionPoint.Y < ImageSize.Height) {
                bitmap.SetPixel(NavePositionPoint.X, NavePositionPoint.Y, Color.Red);
            }
            // Radial unit vector of the ship's position in the XY plane.
            Vector3 NavePositionNormalized = new Vector3(NavePosition.X, NavePosition.Y, 0);
            NavePositionNormalized.Normalize();
            // Subtract the gravitational acceleration (mass 1 test body)
            // directed along that radial vector.
            VelocidadeNave -= SpaceMath.NewtonsToMs2(1, Terra.Gravity(1, Vector3.Distance(NavePosition, Terra.Position)))* NavePositionNormalized;
            NavePosition += VelocidadeNave;
        }
        Console.WriteLine(VelocidadeNave);
        // NOTE(review): hard-coded Windows output path.
        bitmap.Save("D:/TesteImage.png");
        Console.WriteLine("Done.");
        //Console.ReadLine();
    }

    /// <summary>Paints every pixel whose world position lies within the
    /// planet's radius of its position.</summary>
    public static void DrawPlanet(CorpoCeleste Terra, Bitmap bitmap) {
        for(int x = 0; x < ImageSize.Width; x++) {
            for(int y = 0; y < ImageSize.Height; y++) {
                Vector3 currentPos = ImagePointToVector3(new Point(x, y));
                if(Vector3.Distance(currentPos, Terra.Position) <= Terra.Radius) {
                    bitmap.SetPixel(x, y, Color.Blue);
                }
            }
        }
    }

    /// <summary>World coordinates to pixel coordinates; the world origin maps
    /// to the image centre.</summary>
    public static Point Vector3ToImagePoint(Vector3 pos) {
        return new Point(
        (int)((pos.X / Proportion) + ImageSize.Width/2.0),
        (int)((pos.Y / Proportion) + ImageSize.Height/2.0)
        );
    }

    /// <summary>Pixel coordinates back to world coordinates (inverse of
    /// Vector3ToImagePoint, Z fixed at 0).</summary>
    public static Vector3 ImagePointToVector3(Point pos) {
        return new Vector3(
        ((pos.X - ImageSize.Width / 2.0) * Proportion),
        ((pos.Y - ImageSize.Height / 2.0) * Proportion),
        0
        );
    }
}
}
<file_sep>/SpaceSimulation/MainClasses/SpaceMath.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SpaceSimulation {
/// <summary>Physics helpers: gravitation and force/acceleration conversion.</summary>
public static class SpaceMath {
    // Universal gravitational constant, N·m²/kg².
    public const double G = 6.67408E-11;

    /// <summary>Newton's law of universal gravitation: attractive force (N)
    /// between two point masses (kg) separated by <paramref name="distance"/>.</summary>
    public static double Gravity(double mass1, double mass2, double distance) {
        double numerator = G * mass1 * mass2;
        double separationSquared = distance * distance;
        return numerator / separationSquared;
    }

    /// <summary>Acceleration produced by a force on a mass (a = F / m).</summary>
    public static double NewtonsToMs2(double mass, double newtons) {
        return newtons / mass;
    }

    /// <summary>Force needed to give a mass the given acceleration (F = m·a).</summary>
    public static double Ms2ToNewtons(double mass, double acceleration) {
        return mass * acceleration;
    }
}
}
<file_sep>/SpaceSimulation/MainClasses/CorpoCeleste.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SpaceSimulation {
/// <summary>A celestial body: a point mass with radius, position and velocity.</summary>
public class CorpoCeleste {
    // Reference values for Earth (kg, metres).
    public const double EarthMass = 5.9742E24;
    public const double EarthRadius = 6371E3;

    public double Mass = 1d;
    public double Radius = 1d;
    public Vector3 Velocity = new Vector3();
    public Vector3 Position = new Vector3();

    /// <summary>Unit body: mass 1, radius 1, default position/velocity.</summary>
    public CorpoCeleste() : this(1d, 1d) {
    }

    /// <summary>Body with the given mass and radius.</summary>
    public CorpoCeleste(double mass, double radius) {
        Mass = mass;
        Radius = radius;
    }

    /// <summary>Gravitational force between this body and another point mass
    /// at the given distance (delegates to SpaceMath.Gravity).</summary>
    public double Gravity(double mass, double distance) {
        return SpaceMath.Gravity(this.Mass, mass, distance);
    }
}
}
|
cc1f478ab49d5ad13762bd2827103d661193caa0
|
[
"C#"
] | 3 |
C#
|
jeanklann/GravityTestsSimulation
|
c4bcfb7b7151988365a8f83030b1c14a8ce10b15
|
bbd724b3e5afafc6659ebc6c5ecadd74df2aae88
|
refs/heads/master
|
<file_sep>#!/bin/sh
# Note:
# Requires git-subtree
# https://github.com/apenwarr/git-subtree
#
# Merges upstream xboot changes into this repo's firmware/xboot subtree,
# squashing upstream history into a single merge commit.
cd ..
# NOTE(review): the remote URL below looks redacted/placeholder (<EMAIL>) —
# confirm the real git URL before running.
git remote add xboot <EMAIL>:alexforencich/xboot.git
git fetch xboot
git subtree merge -P firmware/xboot --squash -m "merged changes in xboot" xboot/master
|
c6099dd50a008c0b9261e0caf47c2463daff7bf7
|
[
"Shell"
] | 1 |
Shell
|
chazmatazz/xgrid
|
78cae41ae7bdcf06271329ebb2e2e44c24b3f420
|
894046466f0484f6198f58e152e339db6fc00e26
|
refs/heads/master
|
<file_sep># Reading Material
## Convenience
### Python
- [Setup multiple Python environments for Jupyter](https://medium.com/towards-data-science/environment-management-with-conda-python-2-3-b9961a8a5097)
- [Special method names](http://www.diveintopython3.net/special-method-names.html)
- [Design patterns](https://github.com/kamranahmedse/design-patterns-for-humans)
### Git
- [Workflow + Commands](http://blog.osteele.com/2008/05/my-git-workflow/)
## Time series
- [Bayesian TS](http://www.demographic-research.org/volumes/vol29/43/29-43.pdf)
- [Auto ARIMA Notes](https://www.linkedin.com/pulse/note-autoarima-boxcox-transform-al-yazdani/)
## Factor analysis
- [Intro with R](http://www.statpower.net/Content/312/R%20Stuff/Exploratory%20Factor%20Analysis%20with%20R.pdf)
## Other
- [Top R ML Packages](http://www.kdnuggets.com/2017/02/top-r-packages-machine-learning.html)
- [Window functions](https://cran.r-project.org/web/packages/dplyr/vignettes/window-functions.html)
## Summarized
- [Einstein.ai Abstractive Summarization](https://github.com/dkuostat/Reading-Material/blob/master/arXiv-1705-04304%20summary.txt)
<file_sep>library(nycflights13)
library(dplyr)
library(ggplot2)
head(flights)
### Filter rows ###
# Subsequent arguments to df are expressions that filter df
filter(flights, month == 1, day == 1)
filter(flights, month == 1 | month == 2)
### Arrange rows ###
# Reorder by variables
arrange(flights, year, month, day)
arrange(flights, desc(arr_delay))
### Select columns ###
# Name variables to keep
select(flights, year, month, day)
select(flights, year:day)
select(flights, -(year:day))
### Rename columns ###
select(flights, tail_num = tailnum) # Drop unmentioned columns
rename(flights, tail_num = tailnum) # Keep unmentioned columns
### Extract distinct rows ###
# By distinct combinations of named columns
distinct(flights, tailnum)
distinct(flights, origin, dest)
### Add new columns ###
# As function of existing columns
mutate(flights,
gain = arr_delay - dep_delay,
speed = distance / air_time * 60)
mutate(flights,
gain = arr_delay - dep_delay, # "gain" is a new column
gain_per_hour = gain / (air_time / 60)) # Reference new columns!
transmute(flights, # To only keep new vars
gain = arr_delay - dep_delay,
gain_per_hour = gain / (air_time / 60))
### Summarise values ###
# used with aggregate functions (vector => number)
# min(), max(), mean(), sum(), sd(), median(), IQR()
# dplyr-unique: n(): n_distinct(x), first(x), last(x), nth(x, n)
summarise(flights,
delay = mean(dep_delay, na.rm = TRUE))
### Randomly sample rows ###
sample_n(flights, 10) # fixed number
sample_frac(flights, 0.01) # fixed fraction
# To bootstrap, use replace = T
### Grouping ###
# grouped select() = ungrouped select(), except grouping vars are retained
# grouped arrange() orders first by grouping vars
# mutate() and filter() described in vignette("window-functions")
# sample_n() and sample_frac() sample specified rows in each group
# slice() extracts rows within each group
# summarise() will be shown:
# Split the complete dataset into individual planes and
# summarise each plane by counting:
# number of flights (count = n())
# computing the average distance (dist = mean(Distance, na.rm = TRUE))
# arrival delay (delay = mean(ArrDelay, na.rm = TRUE))
by_tailnum <- group_by(flights, tailnum)
delay <- summarise(by_tailnum,
count = n(),
dist = mean(distance, na.rm = TRUE),
delay = mean(arr_delay, na.rm = TRUE))
delay <- filter(delay, count > 20, dist < 2000)
ggplot(delay, aes(dist, delay)) +
geom_point(aes(size = count), alpha = 1/2) +
geom_smooth() +
scale_size_area()
### CHAINING ###
# cont
|
08db30b57a5d8145eb7757d30d127f4d3515105b
|
[
"Markdown",
"R"
] | 2 |
Markdown
|
dkuostat/Reading-Material
|
7394c75ab2860bcd95376bdd7b97287d35923275
|
80daa4034a7dfd879ea3d4ddd5ba5e2cc0b64a85
|
refs/heads/master
|
<repo_name>dctrud/clair-singularity<file_sep>/tests/test_image.py
import multiprocessing
import os
import subprocess
import time
import pytest
import requests
from clair_singularity.image import image_to_tgz, check_image, http_server, ImageException
from clair_singularity.util import sha256, err_and_exit, wait_net_service
@pytest.fixture
def testimage(tmpdir):
"""Fetch a test singularity image"""
cwd = os.getcwd()
os.chdir(tmpdir.strpath)
# This pulls a singularity lolcow image
subprocess.check_output(['singularity', 'pull', '-U', 'library://sylabsed/examples/lolcow:sha256.2c82ea3923489b14b7c6b7cc593f384c44e107a0a0579d9148fa1331d4508736'])
os.chdir(cwd)
return os.path.join(tmpdir.strpath, 'lolcow_sha256.2c82ea3923489b14b7c6b7cc593f384c44e107a0a0579d9148fa1331d4508736.sif')
def test_check_image(testimage):
    """check_image: True for an existing image, ImageException otherwise."""
    # A real image file is accepted.
    assert check_image(testimage)
    # A nonexistent path raises (not sys.exit) so callers can handle it.
    with pytest.raises(ImageException) as excinfo:
        check_image('i_do_not_exist.img')
    assert excinfo.type == ImageException
def test_image_to_tgz(testimage):
    """image_to_tgz yields a temp directory containing the exported tarball."""
    temp_dir, tar_file = image_to_tgz(testimage, False)
    # A temporary directory was created ...
    assert os.path.isdir(temp_dir)
    # ... holding the .tar.gz export.
    assert os.path.isfile(tar_file)
    # No fixed sha256 assertion: the tar created is not byte-reproducible
    # (directory/file ordering varies between runs).
def test_http_server(testimage, tmpdir):
    """Test we can retrieve a test file from in-built http server faithfully"""
    # Serve the directory containing the test image from a child process,
    # mirroring how the CLI exposes an export to Clair.
    httpd = multiprocessing.Process(target=http_server,
                                    args=(os.path.dirname(testimage), '127.0.0.1', 8088, False))
    httpd.daemon = True
    httpd.start()
    # Allow up to 30 seconds for the httpd to start and be answering requests
    httpd_ready = wait_net_service('127.0.0.1', 8088, 30)
    if not httpd_ready:
        httpd.terminate()
        err_and_exit("HTTP server did not become ready", 1)
    r = requests.get('http://127.0.0.1:8088/lolcow_sha256.2c82ea3923489b14b7c6b7cc593f384c44e107a0a0579d9148fa1331d4508736.sif', stream=True)
    tmpfile = os.path.join(tmpdir.strpath, 'downloaded.sif')
    # Check the file is good: stream the body to disk in 1 KiB chunks.
    with open(tmpfile, 'wb') as fd:
        for block in r.iter_content(1024):
            fd.write(block)
    # Stop the server before asserting so an assertion failure cannot leak
    # the child process.
    httpd.terminate()
    assert r.status_code == requests.codes.ok
    # The round-tripped file must hash identically to the original image.
    assert sha256(tmpfile) == sha256(testimage)
<file_sep>/build_scripts/travis_tests.sh
#!/bin/bash
# CI test driver: run the non-Clair suite on every build; on the Python 3.6
# build additionally start a Clair stack with Docker and run the full suite
# inside the clair-singularity container.
set -e
set -u

echo "Running setup.py install"
python setup.py install

echo "Running tests that don't need Clair"
pytest tests/ -v -m "not needs_clair" --cov clair_singularity --cov-report term-missing

if [[ $TRAVIS_PYTHON_VERSION == "3.6"* ]]; then
    echo "Python 3.6 - running docker tests with Clair"
    # Pre-seeded vulnerability DB plus the Clair scanner, linked together.
    docker pull arminc/clair-db:2019-06-24
    docker run -d --name clair-db arminc/clair-db:2019-06-24
    sleep 5
    docker pull arminc/clair-local-scan:v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
    docker run -p 6060:6060 --link clair-db:postgres -d --name clair arminc/clair-local-scan:v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
    docker ps
    docker build -t clair-singularity .
    # Clear out any old .pyc from the local tests
    find . -name *.pyc -delete
    docker run -v $TRAVIS_BUILD_DIR:/app --privileged --name clair-singularity --link clair --entrypoint '/bin/sh' clair-singularity -c pytest tests/ --cov clair_singularity --cov-report term-missing
fi
<file_sep>/clair_singularity/cli.py
import click
import json
from os import path
import shutil
from multiprocessing import Process
from . import VERSION
from .clair import check_clair, post_layer, get_report, format_report_text, ClairException
from .util import sha256, wait_net_service, err_and_exit, pretty_json
from .image import check_image, image_to_tgz, http_server, ImageException
@click.command()
@click.option('--clair-uri', default="http://localhost:6060",
              help='Base URI for your Clair server')
@click.option('--text-output', is_flag=True, help='Report in Text (Default)')
@click.option('--json-output', is_flag=True, help='Report in JSON')
@click.option('--bind-ip', default="127.0.0.1",
              help='IP address that the HTTP server providing image to Clair should listen on')
@click.option('--bind-port', default=8088,
              help='Port that the HTTP server providing image to Clair should listen on')
@click.option('--quiet', is_flag=True, help='Suppress progress messages to STDERR')
@click.version_option(version=VERSION)
@click.argument('image', required=True)
def cli(image, clair_uri, text_output, json_output, bind_ip, bind_port, quiet):
    """Scan a Singularity IMAGE for vulnerabilities with a Clair server.

    Exports IMAGE to a .tar.gz, serves it over HTTP so Clair can ingest it
    as a parent-less layer, then fetches and prints Clair's vulnerability
    report (plain text by default, JSON with --json-output).
    """
    API_URI = clair_uri + '/v1/'

    # Check image exists, and export it to a gzipped tar in a temporary directory
    try:
        check_image(image)
        (tar_dir, tar_file) = image_to_tgz(image, quiet)
    # BUG FIX: this previously caught `ImageError`, a name that does not
    # exist anywhere -- any image failure raised NameError instead of
    # exiting cleanly. The module imports (and image.py raises) ImageException.
    except ImageException as e:
        err_and_exit(e, 1)

    # Image name for Clair will be the SHA256 of the .tar.gz
    image_name = sha256(tar_file)

    if not quiet:
        click.echo("Image has SHA256: %s" % image_name, err=True)

    # Make sure we can talk to Clair OK
    try:
        check_clair(API_URI, quiet)
    except ClairException as e:
        err_and_exit(e, 1)

    # Start an HTTP server in a child process to serve the .tar.gz from our
    # temporary directory so that Clair can retrieve it
    httpd = Process(target=http_server, args=(tar_dir, bind_ip, bind_port, quiet))
    httpd.daemon = True
    httpd.start()

    # Allow up to 30 seconds for the httpd to start and be answering requests
    httpd_ready = wait_net_service(bind_ip, bind_port, 30)
    if not httpd_ready:
        httpd.terminate()
        shutil.rmtree(tar_dir)
        err_and_exit("Error: HTTP server did not become ready\n", 1)

    image_uri = 'http://%s:%d/%s' % (bind_ip, bind_port, path.basename(tar_file))

    # Register the image with Clair as a docker layer that has no parent
    try:
        post_layer(API_URI, image_name, image_uri, quiet)
    except ClairException as e:
        httpd.terminate()
        shutil.rmtree(tar_dir)
        err_and_exit(e, 1)

    # Done with the .tar.gz so stop serving it and remove the temp dir
    httpd.terminate()
    shutil.rmtree(tar_dir)

    # Retrieve the vulnerability report from Clair
    report = get_report(API_URI, image_name)

    # Spit out the report on STDOUT
    if json_output:
        pretty_report = pretty_json(report)
        click.echo(pretty_report)
    else:
        format_report_text(report)
<file_sep>/README.md
# clair-singularity
<a href="https://codeclimate.com/github/dctrud/clair-singularity"><img src="https://codeclimate.com/github/dctrud/clair-singularity/badges/gpa.svg" /></a>
<a href="https://travis-ci.org/dctrud/clair-singularity"><img src="https://travis-ci.org/dctrud/clair-singularity.svg?branch=master"></a>
[](https://coveralls.io/github/dctrud/clair-singularity?branch=master)
__Scan [Singularity](http://singularity.lbl.gov/) container images for security vulnerabilities
using [CoreOS Clair](https://github.com/coreos/clair).__
The [CoreOS Clair vulnerability scanner](https://github.com/coreos/clair) is a useful tool able to scan docker and other container
formats for security vulnerabilities. It obtains up-to-date lists of vulnerabilities for various
platforms (namespaces) from public databases.
We can use Clair to scan singularity containers, by exploiting the fact that an exported .tar.gz of a
singularity container image is similar to a single layer docker image.
This tool:
* Exports a singularity image to a temporary .tar.gz file (this will be under $TMPDIR)
* Serves the .tar.gz file via an in-built http server, so the Clair service can retrieve it
* Calls the Clair API to ingest the .tar.gz file as a layer for analysis
* Calls the Clair API to retrieve a vulnerability report for this layer
* Displays a simple text, or full JSON format report
Based on experiments detailed [in this Gist](https://gist.github.com/dctrud/479797e5f48cfe39cdb4b50a15e4c567)
__IMPORTANT NOTES__
This tool should be considered proof of concept, not heavily tested. Use at your own risk.
There is no support yet for SSL client certificates to verify that we are sending API requests to a trusted
Clair instance, or that only a trusted Clair instance can retrieve images from the inbuilt http server.
*This means that this solution is insecure except with an isolated local install of Clair*.
## Requirements
To use clair-singularity you will need a _Linux_ host with:
* Python 2.7 or greater installed
* Singularity 3+ installed (tested with 3.2.1) and the singularity executable in your `PATH`
* A Clair instance running somewhere, that is able to access the machine you will run
clair-singularity on. It's easiest to accomplish this using docker to run a local Clair instance as below.
## Starting a local Clair instance
If you have docker available on your local machine, the easiest way to start scanning your
Singularity images is to fire up a Clair instance locally, with docker. The official Clair docker images
are a blank slate, and do not include any vulnerability information. At startup Clair will have to
download vulnerability information from the internet, which can be quite slow. Images from github
user arminc are available that include pre-seeded databases:
https://github.com/arminc/clair-local-scan
To startup a Clair instance locally using these instances:
```bash
docker run -d --name db arminc/clair-db:2019-06-24
docker run -p 6060:6060 --link db:postgres -d --name clair arminc/clair-local-scan:v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
```
*Replace the clair-db:2019-06-24 image tag with a later date for newer vulnerabilities*
## Installation
Clone the git repo, or download and extract the zip then:
```bash
python setup.py install
```
## Usage
__Clair on same machine__
If you are running `clair-singularity` locally (outside of docker), and clair
within docker, you need to tell `clair-singularity` to serve images on the main
IP of your host, so that dockerized clair can access them.
To scan a singularity image, using a clair instance running under local docker, on
port 6060:
clair-singularity --bind-ip 192.168.1.201 myimage.img
*Replace `192.168.1.201` with a non-localhost IP of your machine, accessible to
docker containers.*
__Clair on a different machine__
If clair is running on a different machine, you must use the `--clair-uri`
option to specify the base URI to the clair instance, and the `--bind-ip` and/or
`--bind-port` options to specify a public IP and port on this machine, that
clair can access to retrieve images from `clair-singularity`.
clair-singularity --clair-uri http://10.0.1.202:6060 --bind-ip=10.0.1.201 --bind-port=8088 myimage.img
__Full JSON Reports__
By default, clair-singularity gives a simplified text report on STDOUT. To obtain the full JSON
report returned by Clair, use the `--json-output` option.
clair-singularity --json-output myimage.img
## Development / Testing
Tests can be run in 3 different ways:
__Local - no access to Clair__
Runs all tests that don't depend on access to a Clair server, using the local Python.
$ build_scripts/noclair_local_tests.sh
__Local - dockerized with Clair__
Starts a Clair service with local docker, builds clair-singularity into a docker container, with Python 3.5, and
runs tests in this docker container.
$ build_scripts/docker_local_tests.sh
__TravisCI__
Travis CI automated testing will test non-Clair dependent code using Python 2.7, 3.6, 3.7
Clair dependent code will be tested only in the 3.6 environment, by building the docker container, starting a Clair
service, and running tests in the docker container.
<file_sep>/Dockerfile
FROM singularityware/singularity:3.2.1

# Test/runtime dependencies: Python 3 toolchain plus the pytest stack.
RUN apk add --update python3 python3-dev py3-pip build-base
RUN pip3 install flake8 pytest pytest-cov pytest-flake8 python-coveralls

RUN mkdir /app
COPY . /app
RUN cd /app && python3 setup.py install

# Ports used to serve exported images to Clair during scans and tests.
EXPOSE 8088
EXPOSE 8081
EXPOSE 8082

WORKDIR /app
# BUG FIX: exec-form ENTRYPOINT must be a double-quoted JSON array.
# With single quotes Docker cannot parse it as JSON and silently falls
# back to shell form, running the literal string through /bin/sh.
ENTRYPOINT ["/usr/bin/clair-singularity"]
<file_sep>/setup.py
"""
Scan Singularity container images using CoreOS Clair.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'six', 'requests']
setup(
name='clair_singularity',
version='0.2.0',
url='https://github.com/dctrud/clair-singularity',
author='<NAME>',
author_email='<EMAIL>',
description='Scan Singularity container images using CoreOS Clair.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
setup_requires=['pytest-runner'],
tests_require=['pytest', 'pytest-cov', 'pytest-flake8'],
entry_points={
'console_scripts': [
'clair-singularity = clair_singularity.cli:cli',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Quality Assurance',
]
)
<file_sep>/build_scripts/docker_local_tests.sh
#!/bin/sh
# Local integration test driver: start a throwaway Clair stack (DB +
# scanner) with Docker, build the clair-singularity image, run the full
# suite inside it, then tear everything down. -x echoes each command.
set -e
set -u
set -x
# Remove leftovers from a previous run (ignore failures if none exist).
docker stop clair-db clair || true
docker rm clair-db clair || true
# Pre-seeded vulnerability database + Clair scanner, linked together.
docker pull arminc/clair-db:2019-06-24
docker run -d --name clair-db arminc/clair-db:2019-06-24
sleep 5
docker pull arminc/clair-local-scan:v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
docker run -p 6060:6060 --link clair-db:postgres -d --name clair arminc/clair-local-scan:v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
docker rm clair-singularity || true
docker build -t clair-singularity .
# --privileged is required for Singularity builds inside the container.
docker run --privileged --name clair-singularity --link clair --entrypoint '/bin/sh' clair-singularity -c pytest tests/ --cov clair_singularity --cov-report term-missing
docker stop clair-db clair clair-singularity || true
docker rm clair-db clair clair-singularity || true
<file_sep>/clair_singularity/clair.py
from __future__ import print_function
import pprint
import requests
import sys
import time
from .util import pretty_json
class ClairException(Exception):
    """Raised for any failure communicating with the Clair API."""
    pass
def check_clair(API_URI, quiet):
    """Verify the Clair v1 API is reachable via its namespaces endpoint.

    Returns True on success; raises ClairException when the endpoint
    cannot be reached or returns an unexpected payload.
    """
    if not quiet:
        sys.stderr.write("Checking for Clair v1 API\n")
    try:
        response = requests.get(API_URI + 'namespaces')
        namespaces = response.json()['Namespaces']
        if not quiet:
            sys.stderr.write("Found Clair server with %d namespaces\n" % len(namespaces))
    except Exception as e:
        # Any failure (network, JSON, missing key) is wrapped uniformly.
        raise ClairException("Error - couldn't access Clair v1 API at %s\n%s\n" % (API_URI, e))
    return True
def post_layer(API_URI, image_name, image_uri, quiet):
    """Register an image .tar.gz with Clair as a parent-less layer"""
    try:
        # Clair's v1 layers endpoint; Format "Docker" lets it treat the
        # tarball as a single docker-style layer with no parent.
        payload = {
            "Layer": {"Name": image_name,
                      "Path": image_uri,
                      "Format": "Docker"}
        }
        if not quiet:
            sys.stderr.write(pprint.pformat(payload))
        # NOTE(review): unconditional 1 s delay -- presumably to give the
        # child HTTP server serving the tarball time to settle; confirm.
        time.sleep(1)
        r = requests.post(API_URI + 'layers', json=payload)
        if r.status_code == requests.codes.created:
            if not quiet:
                sys.stderr.write("Image registered as layer with Clair\n")
        else:
            # This raise is inside the try, so it is caught below and
            # re-wrapped in the generic "couldn't send" message.
            raise ClairException("Failed registering image with Clair\n %s\n" % pretty_json(r.json()))
    except Exception as e:
        raise ClairException("Error - couldn't send image to Clair - %s\n" % (e))
def get_report(API_URI, image_name):
    """Fetch the features/vulnerabilities report for a layer from Clair.

    Returns the decoded JSON document; raises ClairException on any
    HTTP or decoding failure.
    """
    try:
        response = requests.get(API_URI + 'layers/' + image_name,
                                params={'vulnerabilities': 'true'})
        if response.status_code != requests.codes.ok:
            # Raised inside the try on purpose: it is re-wrapped below,
            # matching the original error chaining.
            raise ClairException("Failed retrieving report from Clair\n %s\n" % pretty_json(response.json()))
        return response.json()
    except Exception as e:
        raise ClairException("Error - couldn't retrieve report from Clair - %s\n" % (e))
def format_report_text(report):
    """Print a minimal plain-text summary of a Clair report.

    For every feature that carries a 'Vulnerabilities' key, prints an
    underlined "name - version" heading followed by one entry per
    vulnerability (name/severity, link, description, blank separator).
    """
    for feature in report['Layer']['Features']:
        if 'Vulnerabilities' not in feature:
            continue
        heading = feature['Name'] + ' - ' + feature['Version']
        print("%s - %s" % (feature['Name'], feature['Version']))
        print("-" * len(heading))
        for vuln in feature['Vulnerabilities']:
            print("%s (%s)" % (vuln['Name'], vuln['Severity']))
            print(vuln['Link'])
            print(vuln['Description'])
            print("\n")
<file_sep>/build_scripts/travis_singularity.sh
#!/bin/sh
# Install Singularity 3.2.1 from source on a Travis Ubuntu worker:
# build dependencies, then the Go toolchain, then the release tarball.
sudo apt-get update && sudo apt-get install -y \
    build-essential \
    libssl-dev \
    uuid-dev \
    libgpgme11-dev \
    squashfs-tools \
    libseccomp-dev \
    pkg-config
# Go is required to build Singularity 3.x.
export VERSION=1.11 OS=linux ARCH=amd64 && \
    wget https://dl.google.com/go/go$VERSION.$OS-$ARCH.tar.gz && \
    sudo tar -C /usr/local -xzf go$VERSION.$OS-$ARCH.tar.gz && \
    rm go$VERSION.$OS-$ARCH.tar.gz
echo 'export GOPATH=${HOME}/go' >> ~/.bashrc && \
    echo 'export PATH=/usr/local/go/bin:${PATH}:${GOPATH}/bin' >> ~/.bashrc && \
    source ~/.bashrc
# Fetch, configure, build, and install Singularity itself.
export VERSION=3.2.1 && # adjust this as necessary \
mkdir -p $GOPATH/src/github.com/sylabs && \
cd $GOPATH/src/github.com/sylabs && \
wget https://github.com/sylabs/singularity/releases/download/v${VERSION}/singularity-${VERSION}.tar.gz && \
tar -xzf singularity-${VERSION}.tar.gz && \
cd ./singularity && \
./mconfig && \
make -C ./builddir && \
sudo make -C ./builddir install
<file_sep>/tests/test_clair.py
import pytest
from clair_singularity.clair import check_clair, post_layer, get_report
API_URL = 'http://clair:6060/v1/'
@pytest.mark.needs_clair
def test_check_clair():
    """The Clair v1 API answers at the linked 'clair' container address."""
    # We can talk to the API
    assert check_clair(API_URL,False)
<file_sep>/tests/test_util.py
from clair_singularity.util import sha256
def test_sha256():
    """Check we can get a sha256 on something that won't change often"""
    # NOTE(review): pinned to the repo's current .gitignore content; this
    # expected digest must be updated whenever that file changes.
    assert sha256('.gitignore') == \
        'da04d844bb8a1fd051cfc7cb8bba1437f3f237f48d2974d72f749ad7fbfd1d96'
<file_sep>/build_scripts/noclair_local_tests.sh
#!/bin/bash
# Run only the tests that do not require a Clair server (everything not
# marked "needs_clair"), with coverage reporting.
set -e
set -u
pytest tests/ -v -m "not needs_clair" --cov clair_singularity --cov-report term-missing
<file_sep>/clair_singularity/image.py
import shutil
import subprocess
import sys
import tempfile
from os import path, chdir
from six.moves import SimpleHTTPServer, socketserver
class ImageException(Exception):
    """Raised when a Singularity image is missing or cannot be exported."""
    pass
def check_image(image):
    """Return True if ``image`` is an existing regular file.

    Raises ImageException (rather than exiting) so callers decide how
    to report a missing image.
    """
    if path.isfile(image):
        return True
    raise ImageException('Error: Singularity image "%s" not found.' % image)
def image_to_tgz(image, quiet):
    """Export the singularity image to a gzipped tarball.

    Returns (tar_dir, tar_gz_file): a fresh temporary directory and the
    .tar.gz path inside it. The caller owns tar_dir and must remove it.

    FIX: the intermediate sandbox directory is now always removed (it was
    previously leaked under $TMPDIR on every call), and tar_dir is cleaned
    up when export fails, since a raising call never hands it to the caller.
    """
    sandbox_dir = tempfile.mkdtemp()
    tar_dir = tempfile.mkdtemp()
    tar_gz_file = path.join(tar_dir, path.basename(image) + '.tar.gz')
    try:
        # Unpack the image into a sandbox directory with Singularity.
        cmd = ['singularity', 'build', '-F', '--sandbox', sandbox_dir, image]
        if not quiet:
            sys.stderr.write("Exporting image to sandbox.\n")
        try:
            subprocess.check_call(cmd)
        except (subprocess.CalledProcessError, OSError) as e:
            raise ImageException("Error calling Singularity export to create sandbox\n%s" % e)

        # Pack the sandbox contents into the .tar.gz Clair will fetch.
        cmd = ['tar', '-C', sandbox_dir, '-zcf', tar_gz_file, '.']
        if not quiet:
            sys.stderr.write("Compressing to .tar.gz\n")
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise ImageException("Error calling gzip export to compress .tar file\n%s" % e)
    except Exception:
        # On failure the caller never learns tar_dir, so remove it here.
        shutil.rmtree(tar_dir, ignore_errors=True)
        raise
    finally:
        # The sandbox is only an intermediate; remove it unconditionally.
        shutil.rmtree(sandbox_dir, ignore_errors=True)

    return (tar_dir, tar_gz_file)
class QuietSimpleHTTPHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Request handler that suppresses per-request access logging."""
    def log_message(self, format, *args):
        # Drop all access-log output (used when --quiet is passed).
        pass
def http_server(dir, ip, port, quiet):
    """Use Python's Simple HTTP server to expose the image over HTTP for
    clair to grab it.

    Blocks forever (serve_forever); intended to run in a child process
    that the caller terminates when done.
    """
    sys.stderr.write("Serving Image to Clair from http://%s:%d\n" % (ip, port))
    chdir(dir)
    handler_cls = (QuietSimpleHTTPHandler if quiet
                   else SimpleHTTPServer.SimpleHTTPRequestHandler)
    server = socketserver.TCPServer((ip, port), handler_cls)
    server.serve_forever()
<file_sep>/clair_singularity/util.py
import hashlib
import json
import sys
def sha256(fname):
    """Return the hex-encoded SHA-256 digest of the file at ``fname``.

    Reads in 64 KiB chunks so arbitrarily large files are hashed without
    being loaded fully into memory.
    """
    digest = hashlib.sha256()
    with open(fname, "rb") as handle:
        while True:
            chunk = handle.read(65536)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def pretty_json(obj):
    """Serialize ``obj`` as key-sorted JSON, 2-space indented, compact separators."""
    return json.dumps(obj,
                      sort_keys=True,
                      indent=2,
                      separators=(',', ':'))
def err_and_exit(e, code=1):
    """Write ``e`` (stringified) to STDERR, then exit with status ``code``."""
    message = str(e)
    sys.stderr.write(message)
    sys.exit(code)
# http://code.activestate.com/recipes/576655-wait-for-network-service-to-appear/
#
#
# Copyright (c) 2017 ActiveState Software Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
def wait_net_service(server, port, timeout=None):
    """ Wait for network service to appear
        @param timeout: in seconds, if None or 0 wait forever
        @return: True or False, if timeout is None may return only True or
        throw unhandled network exception
    """
    # Adapted from the ActiveState recipe referenced (with license) above.
    import socket

    s = socket.socket()

    if timeout:
        from time import time as now
        # time module is needed to calc timeout shared between two exceptions
        end = now() + timeout

    while True:
        try:
            if timeout:
                next_timeout = end - now()
                if next_timeout < 0:
                    return False
                else:
                    s.settimeout(next_timeout)

            s.connect((server, port))

        except (socket.timeout, socket.error):
            # Connection refused/timed out: retry until the deadline
            # passes (or forever when no timeout was given).
            # NOTE(review): with timeout=None this busy-loops on refused
            # connections with no sleep -- confirm acceptable for callers.
            pass

        else:
            s.close()
            return True
<file_sep>/tests/test_cli.py
import pytest
import json
import socket
from click.testing import CliRunner
from clair_singularity.cli import cli
from .test_image import testimage
MY_IP = socket.gethostbyname(socket.gethostname())
@pytest.fixture
def runner():
    """Provide a Click CliRunner for invoking the CLI in-process."""
    return CliRunner()
def test_help(runner):
    """--help prints the Click usage banner."""
    result = runner.invoke(cli, ['--help'])
    assert 'Usage:' in result.output
@pytest.mark.needs_clair
def test_full_json(runner, testimage):
    """End-to-end scan with --json-output: check the report's structure and counts."""
    result = runner.invoke(cli,
                           ['--quiet', '--json-output', '--bind-ip', MY_IP, '--bind-port', '8081', '--clair-uri',
                            'http://clair:6060', testimage])
    output = json.loads(result.output)
    # Using the specific lolcow image and the 2019-06-24 clair db...
    # There are 97 features in the container scan, and 23 have vulnerabilities
    assert 'Layer' in output
    assert 'Features' in output['Layer']
    assert len(output['Layer']['Features']) == 97
    features_with_vuln = 0
    for feature in output['Layer']['Features']:
        if 'Vulnerabilities' in feature:
            features_with_vuln = features_with_vuln + 1
    assert features_with_vuln == 23
@pytest.mark.needs_clair
def test_full_text(runner, testimage):
    """End-to-end scan with the default text report: spot-check known CVEs."""
    result = runner.invoke(cli, ['--quiet', '--bind-ip', MY_IP, '--bind-port', '8082', '--clair-uri',
                                 'http://clair:6060', testimage])
    # Check we do have some CVEs we expect reported here
    assert 'ssl' in result.output
    assert 'CVE' in result.output
|
d9ea5ae58f76d45763852a9942b21a822e201168
|
[
"Markdown",
"Python",
"Dockerfile",
"Shell"
] | 15 |
Python
|
dctrud/clair-singularity
|
8a35125aaa9ef60418c7b708dd4cd7318402e6f5
|
3aeed7cb84cfafddd057556acccddedf35fa25fe
|
refs/heads/master
|
<repo_name>kkathuria93/js-deli-counter-bootcamp-prep-000<file_sep>/index.js
var katzDeli = [];
function takeANumber(katzDeli, name) {
  // Add the customer to the back of the line and report their position.
  katzDeli.push(name);
  return "Welcome, " + name + ". You are number " + katzDeli.length + " in line.";
}
function nowServing(katzDeli) {
  // Serve (and remove) the first person in line, if anyone is waiting.
  // FIX: removed the unreachable `return katzDeli` that followed the first
  // return, and the unused `firstPerson` variable.
  if (katzDeli.length > 0) {
    return "Currently serving " + katzDeli.shift() + ".";
  }
  return "There is nobody waiting to be served!";
}
function currentLine(katzDeli) {
  // Render the queue as "The line is currently: 1. A, 2. B",
  // or a fixed message when nobody is waiting.
  if (katzDeli.length === 0) {
    return "The line is currently empty.";
  }
  var entries = katzDeli.map(function (name, idx) {
    return " " + (idx + 1) + ". " + name;
  });
  // String concatenation stringifies the array (comma-joined), matching
  // the original template-literal behavior.
  return "The line is currently:" + entries;
}
|
686d641680281d5d475e5071353f828714fca5d3
|
[
"JavaScript"
] | 1 |
JavaScript
|
kkathuria93/js-deli-counter-bootcamp-prep-000
|
a9dbaed30a15c75a012666272bb7058884868e51
|
7617e6b8aeb707ceb840871b5554b9d03c4871c4
|
refs/heads/master
|
<file_sep>hunalign=/mnt/home/boxiang/software/hunalign-1.1/src/hunalign/hunalign
ladder2text=/mnt/home/boxiang/software/hunalign-1.1/scripts/ladder2text.py
dictionary=/mnt/home/boxiang/software/hunalign-1.1/data/null.dic
in_dir=../processed_data/evaluation/nejm/align/hunalign/input/
out_dir=../processed_data/evaluation/nejm/align/hunalign/align/
mkdir -p $out_dir
$hunalign -utf -realign -batch $dictionary $in_dir/batch
while read line; do
src=$(echo "$line" | cut -f1)
tgt=$(echo "$line" | cut -f2)
ladder=$(echo "$line" | cut -f3)
echo Source: $src
echo Target: $tgt
echo Alignment: $ladder
bitext=${ladder/.ladder/.bitext}
bitext=${bitext/$in_dir/$out_dir}
python2.7 $ladder2text $ladder $src.mark $tgt.mark > $bitext
done < $in_dir/batch
max=$(cat $out_dir/*bitext | cut -f 1 | sort -n | tail -n1)
min=$(cat $out_dir/*bitext | cut -f 1 | sort -n | head -n1)
for i in `seq $min 0.05 $max`; do
echo $i
mkdir -p $out_dir/$i
cat $out_dir/*bitext | awk '{if ($1 >= threshold) {print $0}}' threshold=$i | cut -f 2 > $out_dir/$i/align.zh
cat $out_dir/*bitext | awk '{if ($1 >= threshold) {print $0}}' threshold=$i | cut -f 3 > $out_dir/$i/align.en
done<file_sep>import argparse
import pandas as pd
import os
from collections import defaultdict
# Command-line interface: paths to the gold alignment file, the
# per-language sentence files, and the output prefix for the corpus.
parser = argparse.ArgumentParser(description="Convert alignment file "\
    "into parallel corpora.")
parser.add_argument("--align_fn", type=str, help="Path to ground-truth "\
    "alignment file.")
# BUG FIX: the --zh_fn/--en_fn help strings were swapped (zh said
# "English sentences" and en said "Chinese sentences").
parser.add_argument("--zh_fn", type=str, help="Path to Chinese sentences.")
parser.add_argument("--en_fn", type=str, help="Path to English sentences.")
parser.add_argument("--out_fn", type=str, help="Path to output directory.")
args = parser.parse_args()
# Create the output directory up front so main() can write immediately.
os.makedirs(os.path.dirname(args.out_fn), exist_ok=True)
def align_en_zh(align, en, zh):
    """Expand sentence-index alignments into actual sentence pairs.

    Mutates ``align`` by adding "zh"/"en" index columns parsed from its
    "align" column ("<zh idx> <=> <en idx>"), then returns a new DataFrame
    with one row per fully aligned pair; rows containing "omitted" on
    either side are dropped. Indices in the align column are 1-based.
    """
    align["zh"] = [entry.split(" <=> ")[0] for entry in align["align"]]
    align["en"] = [entry.split(" <=> ")[1] for entry in align["align"]]

    def join_sentences(index_csv, sentences):
        # Concatenate the 1-based sentences named in ``index_csv``; the
        # second return value is False when an "omitted" marker appears.
        joined, complete = "", True
        for token in index_csv.split(","):
            if token == "omitted":
                complete = False
            else:
                joined += sentences["sent"].iloc[int(token) - 1]
        return joined, complete

    rows = defaultdict(list)
    for doc in align.doc.unique():
        en_doc = en[en.doc == doc]
        zh_doc = zh[zh.doc == doc]
        align_doc = align[align.doc == doc]
        # Skip documents missing on either side.
        if en_doc.shape[0] == 0 or zh_doc.shape[0] == 0:
            continue
        for zh_idx, en_idx, status in zip(align_doc["zh"], align_doc["en"],
                                          align_doc["status"]):
            zh_sent, zh_ok = join_sentences(zh_idx, zh_doc)
            en_sent, en_ok = join_sentences(en_idx, en_doc)
            if zh_ok and en_ok:
                rows["doc"].append(doc)
                rows["align"].append("{} <=> {}".format(zh_idx, en_idx))
                rows["status"].append(status)
                rows["zh"].append(zh_sent)
                rows["en"].append(en_sent)
    return pd.DataFrame(rows)
def read_data(args):
    """Load the alignment table and, when sentence files are given, expand it.

    Sniffs the alignment file's column count from its first 10 rows:
    3 columns -> (doc, align, status); 4 -> (pmid, doc, align, status);
    anything else raises ValueError. When both --en_fn and --zh_fn are
    provided, the alignment is expanded into sentence pairs via
    align_en_zh(); otherwise en/zh are returned as None.
    """
    shape_getter = pd.read_table(args.align_fn, nrows=10)
    ncol = shape_getter.shape[1]
    print(f"{ncol} columns detected in alignment file.")
    if ncol == 3:
        align = pd.read_table(args.align_fn, names=["doc", "align", "status"])
    elif ncol == 4:
        align = pd.read_table(args.align_fn, names=["pmid", "doc", "align", "status"])
    else:
        raise ValueError(f"Column = {ncol} has not been implemented.")
    if args.en_fn is not None and args.zh_fn is not None:
        # Sentence files are TSVs of (doc, sent_id, sent).
        en = pd.read_table(args.en_fn, names=["doc", "sent_id", "sent"])
        zh = pd.read_table(args.zh_fn, names=["doc", "sent_id", "sent"])
        align = align_en_zh(align, en, zh)
    else:
        en = None
        zh = None
    return align, en, zh
def main(args):
    """Write the aligned sentence pairs to {out_fn}.zh and {out_fn}.en."""
    align, _, _ = read_data(args)
    for lang in ["zh", "en"]:
        # One sentence per line; both files share the same row order so
        # line N of each file forms a parallel pair.
        with open(f"{args.out_fn}.{lang}", "w+") as f:
            for sent in align[lang]:
                f.write(sent + "\n")
if __name__ == "__main__":
main(args)<file_sep>#!/bin/bash
# Must be run on GPU nodes:
ONMT=/mnt/home/boxiang/projects/OpenNMT-py
WMT18=../processed_data/translation/wmt18/train_rnn/ # Path to the WMT18 baseline model.
NEJM=../processed_data/translation/nejm/train_denovo_rnn/ # Path to model fine-tuned on NEJM.
BPE_DIR=../processed_data/translation/wmt18/train_rnn/data/
VALID_DATA=../processed_data/split_data/split_train_test/ # Path to NEJM valid set.
TEST_SRC=$VALID_DATA/nejm.test.zh
TEST_TGT=$VALID_DATA/nejm.test.en
$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.zh < $TEST_SRC > $NEJM/test/test.zh
$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.en < $TEST_TGT > $NEJM/test/test.en
src=$NEJM/test/test.zh
tgt=$NEJM/test/test.en
# Testing Chinese to English translation:
models=($NEJM/models/4000/zh2en_step_100000.pt \
$NEJM/models/8000/zh2en_step_100000.pt \
$NEJM/models/16000/zh2en_step_100000.pt \
$NEJM/models/32000/zh2en_step_100000.pt \
$NEJM/models/64000/zh2en_step_100000.pt \
$NEJM/models/93303/zh2en_step_100000.pt)
translations=(nejm.4000.zh2en \
nejm.8000.zh2en \
nejm.16000.zh2en \
nejm.32000.zh2en \
nejm.64000.zh2en \
nejm.93303.zh2en)
for i in {0..5}; do
model=${models[$i]}
translation=${translations[$i]}
echo "Translate $translation"
python $ONMT/translate.py \
-batch_size 1 \
-model $model \
-src $src \
-output $NEJM/test/$translation \
-replace_unk -verbose \
-beam_size 1 \
-gpu 0 > $NEJM/test/${translation}.log
echo "BPE decoding/detokenising target to match with references"
mv $NEJM/test/$translation{,.bpe}
cat $NEJM/test/$translation.bpe | sed -E 's/(@@ )|(@@ ?$)//g' > $NEJM/test/$translation
echo $NEJM/test/$translation
$ONMT/tools/multi-bleu.perl $TEST_TGT < $NEJM/test/$translation > $NEJM/test/$translation.tc.bleu
done
# Testing English to Chinese translation:
models=($NEJM/models/4000/en2zh_step_100000.pt \
$NEJM/models/8000/en2zh_step_100000.pt \
$NEJM/models/16000/en2zh_step_100000.pt \
$NEJM/models/32000/en2zh_step_100000.pt \
$NEJM/models/64000/en2zh_step_100000.pt \
$NEJM/models/93303/en2zh_step_100000.pt)
translations=(nejm.4000.en2zh \
nejm.8000.en2zh \
nejm.16000.en2zh \
nejm.32000.en2zh \
nejm.64000.en2zh \
nejm.93303.en2zh)
for i in {0..5}; do
model=${models[$i]}
translation=${translations[$i]}
echo "Translate $translation"
python $ONMT/translate.py \
-batch_size 1 \
-model $model \
-src $tgt \
-output $NEJM/test/$translation \
-replace_unk -verbose \
-beam_size 1 \
-gpu 0 > $NEJM/test/${translation}.log
echo "BPE decoding/detokenising target to match with references"
mv $NEJM/test/$translation{,.bpe}
cat $NEJM/test/$translation.bpe | sed -E 's/(@@ )|(@@ ?$)//g' > $NEJM/test/$translation
echo $NEJM/test/$translation
$ONMT/tools/multi-bleu.perl $TEST_SRC < $NEJM/test/$translation > $NEJM/test/$translation.tc.bleu
done
<file_sep>import re
import os
import glob
import time
from collections import defaultdict, Counter
from dateutil import parser
import pandas as pd
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
###########
# Crawler #
###########
def get_years(driver):
    """Collect {year label: URL} from the archive page's year links."""
    year_links = {}
    for element in driver.find_elements_by_class_name("field_video_a"):
        anchor = element.find_element_by_tag_name("a")
        year_links[anchor.text] = anchor.get_attribute("href")
    return year_links
def get_months(driver, year_url):
    """Collect {month number: fragment href} from the year page's tabs.

    Hrefs are made relative by stripping ``year_url``; the character 月
    is removed from each tab label. Tabs with empty labels are skipped.
    """
    month_links = {}
    for tab in driver.find_elements_by_xpath("//a[@role='tab']"):
        fragment = tab.get_attribute("href").replace(year_url, "")
        label = tab.text.replace("月", "")
        if label:
            month_links[label] = fragment
    return month_links
def get_url_to_english_page(driver):
    """Return the href of the English-version link on the page, or None.

    Any lookup failure (element absent, driver error) yields None.
    """
    try:
        anchor = driver.find_element_by_class_name("font-TNR.ft-red.ft_title_e")
        return anchor.get_attribute("href")
    except:
        return None
def get_english_title(article_driver, article_id):
    """Scrape an article's English title, waiting for it to render.

    Journal Watch articles ("jw" in the id) use a different title CSS
    class than regular articles. Returns "" if the element never appears
    within the timeout.
    """
    if "jw" in article_id: # journal watch articles
        class_ = "page-title"
    else:
        class_ = "title_default"
    timeout = 60
    en_title = ""
    try:
        # Block until the title element exists in the DOM (up to 60 s)...
        WebDriverWait(article_driver, timeout).\
            until(EC.presence_of_element_located(
                (By.CLASS_NAME, class_)))
        # ...then poll until it actually contains text (the element can
        # exist before the title string is populated).
        # NOTE(review): this inner loop has no timeout of its own -- it
        # spins indefinitely if the element stays empty; confirm intended.
        while en_title == "":
            time.sleep(0.2)
            en_title = article_driver.\
                find_element_by_class_name(class_).text
    except:
        print("Timeout!")
        en_title = ""
    return en_title
class Container(dict):
    def read_from_disk(self, root_dir):
        """Rebuild the in-memory {year: {month: {article_id: tuple}}} index
        from previously written per-month TSV files under ``root_dir``.

        Clears any existing state first. Each TSV row holds:
        year, month, article_id, zh_title, en_title, zh_url, en_url.
        """
        self.clear()
        print("\n")
        print("################################")
        print("#### Reading existing files ####")
        print("################################")
        # Directory layout: <root_dir>/<year>/<month>.txt
        years = glob.glob(root_dir + "*")
        years = [os.path.basename(x) for x in years]
        for year in years:
            print("Year: {}".format(year))
            sub_dir = "{}/{}/".format(root_dir, year)
            months = glob.glob(sub_dir + "*.txt")
            months = [os.path.basename(x).replace(".txt", "") \
                for x in months]
            self[year] = {}
            for month in months:
                print("Month: {}".format(month))
                self[year][month] = {}
                fn = "{}/{}.txt".format(sub_dir, month)
                with open(fn, "r") as f:
                    for line in f:
                        split_line = line.split("\t")
                        # NOTE(review): these assignments shadow the outer
                        # `year`/`month` loop variables with values read
                        # from the file; keys come from file content, which
                        # presumably matches the directory/file names --
                        # confirm, else self[year][month] could KeyError.
                        year = split_line[0].strip()
                        month = split_line[1].strip()
                        article_id = split_line[2].strip()
                        zh_title = split_line[3].strip()
                        en_title = split_line[4].strip()
                        zh_url = split_line[5].strip()
                        en_url = split_line[6].strip()
                        self[year][month][article_id] = \
                            (zh_title, en_title, zh_url, en_url)
def traverse(self, driver, out_dir):
print("\n")
print("#################################")
print("#### Traversing NEJM website ####")
print("#################################")
years = get_years(driver)
print(f"Found a total of {len(years)} years:")
for year in years:
print(year)
article_driver = webdriver.\
Chrome(options=chrome_options)
for year, year_url in years.items():
print("Year: {}".format(year))
os.makedirs(os.path.join(out_dir, year), exist_ok=True)
if year not in self:
self[year] = {}
driver.get(year_url)
months = get_months(driver, year_url)
print(f"Found a total of {len(months)} months:")
for month in months:
print(month)
for month, month_href in months.items():
print("Month: {}.".format(month))
if month not in self[year]:
self[year][month] = {}
xpath_query = "//a[@href='{}']".format(month_href)
month_element = driver.find_element_by_xpath(xpath_query)
month_element.click() # redirect driver to that month.
time.sleep(0.5)
month_id = month_href.replace("#","") # The id and href is off by a # sign.
xpath_query = f"//div[@id='{month_id}']//"\
"div[@class='weeklycover_box']"\
"//div[@class='box_70c']//a"
articles = driver.find_elements_by_xpath(xpath_query)
print(f"Found a total of {len(articles)} articles:")
for article in articles:
print(article.text)
fn = "{}.txt".format(os.path.join(out_dir, year, month))
with open(fn, "a+") as f:
for art in articles:
zh_title = art.text.replace("•","").strip()
if "Vol." in zh_title and "No." in zh_title:
print("{} is the TOC. Skip.".format(zh_title))
elif zh_title != "" and zh_title != "\ue735":
zh_url = art.get_attribute("href")
article_id = zh_url.split("/")[-1].lower()
article_id = re.sub("yxqy-*","", article_id).\
replace("nejm", "").\
replace(",", ".") # Normalize IDs.
if article_id in self[year][month]:
print(f"Article {zh_title} already stored. Skip.")
continue # Avoid repeating work
else:
print(f"Getting article: {zh_title}.")
article_driver.get(zh_url)
en_url = get_url_to_english_page(article_driver).\
split("?")[0] # Remove unnecessary suffix
article_driver.get(en_url)
en_title = get_english_title(
article_driver, article_id)
self[year][month][article_id] = \
(zh_title, en_title, zh_url, en_url)
if en_title != "":
f.write("\t".join([year, month, article_id, \
zh_title, en_title, zh_url, en_url]) + "\n")
else:
f.write("\t".join([year, month, article_id, \
zh_title, "MISSING", zh_url, en_url]) + "\n")
f.flush()
article_driver.close()
def write_to_disk(self, out_dir):
for year, months in self.items():
os.makedirs(os.path.join(out_dir, year), exist_ok=True)
for month, articles in months.items():
fn = "{}.txt".format(os.path.join(out_dir, year, month))
with open(fn, "w") as f:
for title, (zh_url, en_url) in articles.items():
f.write("\t".join([title, zh_url, en_url]) + "\n")
f.flush()
def count_articles(self):
proto_df = defaultdict(list)
for year, months in self.items():
for month, articles in months.items():
proto_df["year"].append(year)
proto_df["month"].append(month)
proto_df["articles"].append(len(articles))
df = pd.DataFrame(proto_df)
df.sort_values(by=["year", "month"], inplace=True)
return df
def get_all_article_paths(self, root_dir, ext):
article_paths = []
for year, months in self.items():
for month, articles in months.items():
for article, (_,_) in articles.items():
article_paths.append("{}/{}/{}/{}.{}".format(
root_dir, year, month, article, ext))
print("{} Articles.".format(len(article_paths)), flush=True)
return article_paths
def read_article_urls(in_dir):
    """Load every per-month URL table under in_dir into one DataFrame.

    Each file holds tab-separated records:
    year, month, id, zh_title, en_title, zh_url, en_url.
    Raises AssertionError if any article id appears more than once.
    """
    frames = []
    for fn in glob.glob(f"{in_dir}/*/*.txt"):
        print(f"Filename: {fn}")
        frames.append(pd.read_table(fn, header=None))
    articles = pd.concat(frames)
    articles.columns = ["year", "month",
        "id", "zh_title", "en_title",
        "zh_url", "en_url"]
    print(f"Total number of articles: {articles.shape[0]}")
    # Guard against double-counting articles across months/years.
    assert articles["id"].unique().shape[0] == articles.shape[0], \
        "Duplicate articles exists."
    return articles
# Mapping from NEJM article-id prefixes (the id with digits removed,
# e.g. "oa123" -> "oa") to human-readable article-type names.
abbrev = {"cp": "Clinical Practice",
    "oa": "Original Article",
    "ra": "Review Article",
    "cpc": "Case Records",
    "sr": "Special Report",
    "ct": "Clinical Therapeutics",
    "jw.na": "Journal Watch",
    "clde": "Clinical Decisions",
    "cps": "Clinical Prob Solving",
    "p": "Perspective",
    "e": "Editorial",
    "cibr": "Clinical Implications\nof Basic Research",
    "icm": "Images in Clinical Med",
    "ms": "Medicine and Society",
    "c": "Correspondence",
    "sa": "Special Article",
    "x": "Correction",
    "hpr": "Health Policy Report"}
def read_and_preprocess_article(path, lang):
    """Read an article (lowercased) and strip language-specific cruft."""
    article = get_article_as_lowercase_string(path)
    if lang == "zh":
        pass  # no Chinese-specific cleanup needed
    elif lang == "en":
        # Remove the link-decoration text injected by the NEJM site.
        article = article.replace("\n. opens in new tab\n", "")
    else:
        raise ValueError("Unknown language: {}".format(lang))
    return article
def get_article_as_lowercase_string(path):
    """Return the whole file at `path`, lowercased, as one string."""
    with open(path, "r") as handle:
        return handle.read().lower()
def get_article(path):
    """Return the raw, unmodified contents of the file at `path`."""
    with open(path, "r") as handle:
        return handle.read()
def get_nltk_sent_tokenizer(container, lang):
    """Train a Punkt sentence tokenizer over a corpus of articles.

    `container` is either a Container (article paths are derived from
    it) or an explicit list of paths.  Articles missing on disk are
    skipped (and counted).
    """
    assert lang in ["zh", "en"], "Unknown language."
    trainer = PunktTrainer()
    if isinstance(container, Container):
        article_paths = container.get_all_article_paths(
            root_dir="../processed_data/crawler/nejm/articles/",
            ext=lang)
    elif isinstance(container, list):
        print("{} Articles.".format(len(container)))
        article_paths = container
    else:
        raise ValueError("Cannot parse container with class {}".\
            format(container.__class__))
    n_missing = 0
    for article_path in article_paths:
        try:
            text = get_article_as_lowercase_string(article_path)
            trainer.train(text=text, finalize=False)
        except FileNotFoundError:
            print("{} not found.".format(article_path))
            n_missing += 1
    print("{} articles not found.".format(n_missing))
    trainer.finalize_training()
    return PunktSentenceTokenizer(trainer.get_params())
def translate(text, translator, src="zh-cn", dest="en"):
    """Run `text` through `translator` and return lowercased result strings."""
    results = translator.translate(text, src=src, dest=dest)
    return [result.text.lower() for result in results]
class RegexSentenceTokenizer():
    """Sentence tokenizer that splits on a punctuation regex and
    re-attaches the matched punctuation to the preceding sentence."""

    def __init__(self, regex):
        self.regex = regex

    def tokenize(self, text):
        """Split `text` into stripped sentences, keeping end punctuation."""
        pieces = re.split(pattern=self.regex, string=text)
        delimiters = re.findall(pattern=self.regex, string=text)
        n_pieces, n_delims = len(pieces), len(delimiters)
        # A well-formed split yields one piece per delimiter, plus at
        # most one trailing piece.
        assert (n_pieces <= (n_delims + 1)) and (n_delims <= n_pieces), \
            print("Found {} sentences and {} punctuations.".\
                format(n_pieces, n_delims))
        sentences = []
        for idx, piece in enumerate(pieces):
            tail = delimiters[idx] if idx < n_delims else ""
            sentences.append((piece + tail).strip())
        # Drop the empty trailing piece left when text ends in punctuation.
        if sentences[-1] == "":
            sentences.pop()
        return sentences
def get_sentences(sent_tokenizers, texts):
    """Apply each tokenizer in turn, flattening the results every stage.

    Returns [] when no tokenizer is configured (first entry is None).
    `texts` may be a single string or a list of strings.
    """
    if sent_tokenizers[0] is None:
        return []
    if not isinstance(texts, list):
        texts = [texts]
    pending = texts
    for tokenizer in sent_tokenizers:
        tokenized = []
        for chunk in pending:
            tokenized.extend(tokenizer.tokenize(chunk))
        pending = tokenized
    return pending
class AnnoStr(str):
    """A str subclass annotated with a section-header group id, per-digit
    counts, and eager sentence segmentation."""

    def __new__(cls, text, sent_tokenizers):
        return super().__new__(cls, text)

    def __init__(self, text, sent_tokenizers):
        self.detect_numbers()
        self.detect_headers()
        self.count_sentences(sent_tokenizers)

    def detect_headers(self):
        """Classify this string as a known (bilingual) section header.

        Sets self.group to a small integer id; 0 means "not a header".
        Exact matches are checked first, then a few suffix patterns.
        """
        stripped = self.strip()
        exact_headers = {
            "abstract": 1, "摘要": 1,
            "background": 2, "背景": 2,
            "methods": 3, "方法": 3,
            "results": 4, "结果": 4,
            "conclusions": 5, "结论": 5,
            "study population": 6, "研究人群": 6,
            "trial regimen": 7, "trial regimens": 7, "试验治疗方案": 7,
            "trial outcomes": 8, "试验结局": 8,
            "trial populations": 9, "trial population": 9, "试验人群": 9,
            "discussion": 10, "讨论": 10,
            "trial design and oversight": 11, "试验设计和监管": 11,
            "patient population": 12, "患者人群": 12,
            "statistical analysis": 13, "统计学分析": 13,
            "patients": 14, "患者": 14,
            "trial design": 15, "试验设计": 15,
            "疗效": 16, "效果": 16, "有效性": 16, "efficacy": 16,
        }
        suffix_groups = [
            (("终点", "end points", "end point"), 17),
            (("评估", "assessment", "assessments"), 18),
            (("安全性", "safety"), 19),
        ]
        header_group = 0
        if stripped in exact_headers:
            header_group = exact_headers[stripped]
        else:
            for suffixes, group_id in suffix_groups:
                if any(stripped.endswith(s) for s in suffixes):
                    header_group = group_id
                    break
        self.group = header_group

    def detect_numbers(self):
        """Count each digit character appearing in the string."""
        self.number = Counter(re.findall("\d", self))

    def count_sentences(self, sent_tokenizers):
        """Tokenize into sentences; record both the list and its length."""
        sentence_list = get_sentences(sent_tokenizers, self)
        self.num_sents = len(sentence_list)
        self.sents = sentence_list
class Article():
    """A crawled article plus its filtered paragraphs and sentences.

    Boilerplate paragraphs (site chrome, dates, figure captions, ...)
    are removed before sentence tokenization.
    """

    def __init__(self, path, lang, sent_tokenizers=None):
        self.path = path
        self.lang = lang
        # Normalize to a list so multiple tokenizers can be chained.
        self.sent_tokenizers = sent_tokenizers \
            if isinstance(sent_tokenizers, list) \
            else [sent_tokenizers]
        # self.article = read_and_preprocess_article(path, lang)
        self.article = get_article(path)
        self.paragraphs = [AnnoStr(x, self.sent_tokenizers) \
            for x in self.article.split("\n") if x != ""]
        self.filter_paragraphs()
        self.sentences = get_sentences(
            self.sent_tokenizers, self.kept_paragraphs)

    def is_boilerplate(self, text):
        """Return True when `text` is site boilerplate for this language."""
        lang = self.lang
        text = text.strip()
        def is_date(text):
            # Anything dateutil's permissive parser accepts counts as a date.
            try:
                parser.parse(text)
                return True
            except ValueError:
                return False
        def is_reviewer_intro(text):
            # Journal Watch reviewer bylines end with a date and contain
            # "reviewing", or start with "comments".
            text = text.strip()
            last_three_words = " ".join(text.split(" ")[-3:])
            last_two_words = " ".join(last_three_words.split(" ")[-2:])
            if is_date(last_three_words) or \
                is_date(last_two_words):
                if "reviewing" in text:
                    return True
            if text.startswith("comments"):
                return True
            return False
        english_boilerplates = ["access provided by",
            "access provided by lane medical library, "\
            "stanford university med center",
            "lane medical library, stanford university med center",
            "subscribe",
            "or renew",
            "institution: stanford university",
            "original article",
            "metrics",
            "editorial",
            "clinical problem-solving",
            "perspective",
            "audio interview",
            "download",
            "video",
            "focus on research",
            "history of clinical trials"
            ]
        chinese_boilerplates = ["图1.", "图2.", "图3.", "图4.",
            "图5.", "表1.", "表2.", "表3.", "表4.", "表5.",
            "nothing to disclose"]
        if lang == "en":
            if is_date(text):
                return True
            elif is_reviewer_intro(text):
                return True
            elif text in english_boilerplates:
                return True
            elif re.search("\([0-9]{2}:[0-9]{2}\)", text):
                # Audio/video timestamps such as "(12:34)".
                return True
            elif text.startswith("copyright ©"):
                return True
            elif text.startswith("nejm journal watch"):
                return True
            elif text.startswith("supported by"):
                return True
            elif text.startswith("the discovehr study was partially funded"):
                return True
            else:
                return False
        elif lang == "zh":
            if text in chinese_boilerplates:
                return True
            elif text.startswith("评论"):
                return True
            elif text.startswith("引文"):
                return True
            elif text.startswith("出版时的编辑声明"):
                return True
            elif text.startswith("supported by"):
                return True
            elif text.startswith("the discovehr study was partially funded"):
                return True
            else:
                return False
        else:
            raise ValueError("Unknown language: {}".format(lang))

    def filter_paragraphs(self):
        """Split paragraphs into kept vs filtered (boilerplate) lists."""
        kept_paragraphs = []
        filtered_paragraphs = []
        for para in self.paragraphs:
            if para.strip() == "":
                continue
            if self.is_boilerplate(para):
                filtered_paragraphs.append(para)
            else:
                kept_paragraphs.append(para)
        self.kept_paragraphs = kept_paragraphs
        self.filtered_paragraphs = filtered_paragraphs

    def get_paragraph_lengths(self):
        """Per-paragraph lengths: words for English, CJK chars for Chinese
        (ASCII letters are stripped before counting)."""
        if self.lang == "en":
            lengths = [len(para.split(" ")) \
                for para in self.paragraphs \
                if len(para) != 0]
        elif self.lang == "zh":
            lengths = []
            for para in self.paragraphs:
                para = re.sub("[a-z]", "", para)
                length = len(para)
                if length != 0:
                    lengths.append(len(para))
        else:
            raise ValueError("Language not supported: {}".\
                format(self.lang))
        return lengths

    def write_to_disk(self, out_fn, level):
        """Write sentences, the raw article, or paragraphs to out_fn."""
        if level == "sentence":
            with open(out_fn, "w") as f:
                for sent in self.sentences:
                    f.write(sent + "\n")
        elif level == "article":
            with open(out_fn, "w") as f:
                f.write(self.article)
        elif level == "paragraph":
            with open(out_fn, "w") as f:
                # Fixed typo: was `self.paragrahs`, which raised
                # AttributeError whenever level == "paragraph".
                for para in self.paragraphs:
                    f.write(para + "\n")
        else:
            raise ValueError("Unknown level: {}".format(level))
<file_sep>import sys
import re
import os
import glob
from collections import defaultdict
from copy import deepcopy
import logging
from datetime import datetime
from time import sleep
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
sys.path.append(".")
from utils.utils import Container
# Replace these with your NEJM credentials
nejm_username = "######"
nejm_password = "######"
# Global variables:
out_dir = "../processed_data/crawler/nejm/urls/"  # per-month URL tables
article_dir = "../processed_data/crawler/nejm/articles/"  # crawled article text
traverse = True # Whether to get the article urls.
crawl = True # Whether to get the article content.
os.makedirs(out_dir, exist_ok=True)
os.makedirs(article_dir, exist_ok=True)
#####################
# Utility Functions #
#####################
def print_and_log(message):
    """Echo `message` to stdout (unbuffered) and record it in the log."""
    print(message, flush=True)
    logging.info(message)
def detect_dialog_window(driver):
    """Return True when the 'featherlight' modal overlay is present."""
    matches = driver.find_elements_by_xpath(
        "//div[@class='featherlight-content']")
    return matches != []
def close_dialog_window(driver):
    """Dismiss the 'featherlight' modal by clicking its close icon."""
    driver.find_element_by_xpath(
        "//button[@class='featherlight-close-icon featherlight-close']"
    ).click()
def nejm_signin(driver):
    """Log in to nejm.org using the module-level credentials."""
    driver.get("https://www.nejm.org/")
    xpath_query = "//a[@data-interactiontype='sign_in_click']"
    sign_in_click = driver.find_element_by_xpath(xpath_query)
    sign_in_click.click()
    login = driver.find_element_by_id("login")
    login.send_keys(nejm_username)
    password = driver.find_element_by_id("password")
    # Fixed: a credential scrubber left invalid syntax here
    # ("<PASSWORD>_password"); this must be the nejm_password global.
    password.send_keys(nejm_password)
    driver.find_element_by_id("btnSignIn").click()
    print("Signed in to NEJM.")
def detect_paywall(driver):
    """If the subscribe gateway is shown, sign in to get past the paywall."""
    xpath_query = "//a[@class='o-gateway__button o-gateway__button--secondary'" \
        " and @data-interactiontype='subscribe_click']"
    if driver.find_elements_by_xpath(xpath_query) != []:
        nejm_signin(driver)
def nejm_signout(driver):
    """Click the sign-out link on nejm.org."""
    xpath_query = "//a[@data-interactiontype='sign_out_click']"
    matches = driver.find_elements_by_xpath(xpath_query)
    matches[0].click()
def yxqy_login(driver):
    """Log in to the NEJM Qianyan site with the public account.

    If any login element is missing (typically because we are already
    logged in), the attempt is skipped.
    """
    driver.get("https://www.nejmqianyan.cn/index.php?c=week&m=year")
    try:
        xpath_query = "//a[@href='javascript:;' and @class='dropdown-toggle']"
        dropdown = driver.find_element_by_xpath(xpath_query)
        dropdown.click()
        membername = driver.find_element_by_name("membername")
        membername.clear()
        membername.send_keys("publicuser")
        password = driver.find_element_by_name("password")
        password.clear()
        password.send_keys("publicuser")
        login = driver.find_element_by_class_name(\
            "btn.btn-default.fastLoginBtn.login-top")
        login.click()
        sleep(2)
    except Exception:
        # Narrowed from a bare `except:`; also removed the dead local
        # `logged_in = True`, which was never read.
        print("Already logged in to YXQY!")
def jw_login(driver):
    """Log in to the Journal Watch article page embedded login form.

    NOTE(review): the password literal below was scrubbed to
    "<PASSWORD>" before publication; restore a real credential to use.
    If the form is absent (already logged in), the attempt is skipped.
    """
    try:
        email = driver.find_element_by_xpath(\
            "//article-page//input[@id='email_text']")
        email.clear()
        email.send_keys("[email protected]")
        password = driver.find_element_by_xpath(\
            "//article-page//input[@id='pwd_text']")
        password.clear()
        password.send_keys("<PASSWORD>")
        login = driver.find_element_by_xpath(\
            "//article-page//button")
        login.click()
        sleep(3)
    except Exception:
        # Narrowed from a bare `except:`; removed the dead local
        # `logged_in = True`, which was never read.
        print("Already logged into Journal Watch!")
######################
# Crawling Functions #
######################
def crawl_zh_page(driver, article_id, zh_url, out_prefix, verbose=False):
    """Fetch the Chinese article body at zh_url and save it, one stripped
    line per row, to {out_prefix}.full.zh."""
    driver.get(zh_url)
    print_and_log(f"Crawling Chinese article: {article_id}.")
    body = driver.find_element_by_id("nejm-article-content").text
    stripped_lines = [line.strip() for line in body.split("\n")]
    with open(f"{out_prefix}.full.zh", "w") as f:
        for line in stripped_lines:
            f.write(line + "\n")
def crawl_en_page(driver, article_id, en_url, out_prefix, verbose=False):
    """Fetch the English article at en_url and write two files:
    {out_prefix}.full.en (all text) and {out_prefix}.nobox.en (the same
    text with boxed call-out content removed, when present)."""
    driver.get(en_url)
    if detect_dialog_window(driver):
        close_dialog_window(driver)
    # Sign in if paywalled.
    detect_paywall(driver)
    print_and_log(f"Crawling English article: {article_id}.")
    # Article type = id with digits/'%' removed, e.g. "oa", "jw.na".
    article_type = re.sub("[0-9]+", "", article_id).replace("%", "")
    print_and_log(f"Article type: {article_type}.")
    # Crawl article from NEJM website
    if article_type != "jw.na":
        sleep(1)
        full_article = driver.find_element_by_id("full").text
        full_text = [x.strip() for x in full_article.split("\n")]
        try:
            # Boxed side-bar text is removed to produce the "nobox" copy.
            boxed_text = driver.find_element_by_class_name("m-boxed-text").text
            full_text_no_box = [x.strip() for x in \
                full_article.replace(boxed_text, "").split("\n")]
            print("Found boxed text.")
        except:
            full_text_no_box = full_text
            print("No boxed text.")
    # Crawl article from Journal Watch website
    else:
        try:
            WebDriverWait(driver, timeout=60).\
                until(EC.presence_of_element_located(\
                    (By.CLASS_NAME, "article-detail")))
            sleep(1)
        except:
            # Give up on this article; no output files are written.
            print("Timeout!")
            return
        jw_login(driver)
        full_article = driver.find_element_by_class_name("article-detail").text
        full_text = [x.strip() for x in full_article.split("\n")]
        full_text_no_box = full_text
    with open(f"{out_prefix}.full.en", "w") as f:
        for i in full_text:
            f.write(i + "\n")
    with open(f"{out_prefix}.nobox.en", "w") as f:
        for i in full_text_no_box:
            f.write(i + "\n")
def compare_zh_and_en(zh_fn, en_fn, epsilon = 2):
    """Compare non-blank line counts of the zh and en article versions.

    Returns (comparison, zh_len, en_len) where comparison is one of
    "empty_article", "en_too_long", "zh_too_long", or "equal"; a side
    counts as "too long" when its line count exceeds the other's by
    more than a factor of `epsilon`.
    """
    with open(zh_fn, "r") as f_zh, \
        open(en_fn, "r") as f_en:
        zh_lines = [line for line in f_zh.readlines() if line.strip() != ""]
        en_lines = [line for line in f_en.readlines() if line.strip() != ""]
    zh_len, en_len = len(zh_lines), len(en_lines)
    if zh_len == 0 or en_len == 0:
        return "empty_article", zh_len, en_len
    if en_len / zh_len > epsilon:
        return "en_too_long", zh_len, en_len
    if zh_len / en_len > epsilon:
        return "zh_too_long", zh_len, en_len
    return "equal", zh_len, en_len
def crawl_all_urls(driver, container):
    """Crawl the zh and en pages of every article in `container`,
    skipping articles whose output files already exist (resumable)."""
    # Total article count, used only for progress reporting.
    total = len([_ for i in container.values() \
        for j in i.values() for k in j.values()])
    n = 0
    for year, month_dict in container.items():
        print_and_log("#############")
        print_and_log(f"# Year {year} #")
        print_and_log("#############")
        for month, article_dict in month_dict.items():
            os.makedirs(os.path.join(article_dir, year, month), exist_ok=True)
            print_and_log("####################")
            print_and_log(f"# Crawling {year}/{month} #")
            print_and_log("####################")
            for article_id, (zh_title, en_title, zh_url, en_url) in article_dict.items():
                if n % 100 == 0:
                    message = f"### Progress: {n}/{total} Articles ###"
                    print_and_log(message)
                message = f"Article: {zh_title}/{en_title}"
                print_and_log(message)
                out_prefix = f"{article_dir}/{year}/{month}/{article_id}"
                zh_out = f"{out_prefix}.full.zh"
                en_out = f"{out_prefix}.nobox.en"
                # Crawl articles:
                # Existing outputs make the crawl resumable after a crash.
                if not os.path.exists(zh_out):
                    crawl_zh_page(driver, article_id, zh_url, out_prefix)
                if not os.path.exists(en_out):
                    crawl_en_page(driver, article_id, en_url, out_prefix)
                n += 1
def main():
    """Entry point: sign in to both sites, then traverse article URLs
    and/or crawl article bodies per the module-level flags."""
    # Initialize Chrome driver:
    chrome_options = Options()
    chrome_options.add_argument("--no-sandbox")
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--disable-gpu")
    chrome_options.add_argument("--remote-debugging-port=9222")
    driver = webdriver.Chrome(options=chrome_options)
    yxqy_login(driver)
    nejm_signin(driver)
    # Initialize container:
    container = Container()
    container.read_from_disk(out_dir)
    # Traversing the NEJM website.
    if traverse:
        container.traverse(driver, out_dir)
    # Logging:
    if crawl:
        log_fn = "{}/article.log".format(article_dir)
        logging.basicConfig(filename=log_fn, \
            format="%(message)s", level=logging.DEBUG)
        crawl_all_urls(driver, container)
    nejm_signout(driver)
if __name__ == "__main__":
main()<file_sep>moore=~/software/bilingual-sentence-aligner-modified/
in_dir=/mnt/scratch/boxiang/projects/med_translation/processed_data/alignment/moore/input/
out_dir=/mnt/scratch/boxiang/projects/med_translation/processed_data/alignment/moore/align/
mkdir -p $out_dir
# Align with Moore's algorithm:
cd $moore # Must run in this directory
perl $moore/align-sents-all-multi-file.pl $in_dir 0.5
# Rename outputs: "foo_bar.snt.aligned" -> "foo.align.bar" in out_dir.
for f in $in_dir/*snt.aligned; do
	echo $f
	basename=$(basename $f .snt.aligned)
	basename=${basename/_/.align.}
	mv $f $out_dir/$basename
done
# Remove aligner intermediates and the unused wmt18 outputs.
rm $in_dir/*{words,backtrace,nodes,train}
rm $in_dir/{model-one,sentence-file-pair-list}
rm $out_dir/wmt18.align.{zh,en}
<file_sep>import re
import warnings
import argparse
# CLI: paths for the source/target token files and the output alignment file.
parser = argparse.ArgumentParser(description="Generate alignment between "\
    "source and target in the form of src <=> tgt.")
parser.add_argument("--src_fn", type=str, help="Path to source file.")
parser.add_argument("--tgt_fn", type=str, help="Path to target file.")
parser.add_argument("--out_fn", type=str, help="Path to alignment file.")
args = parser.parse_args()
src_fn = args.src_fn
tgt_fn = args.tgt_fn
out_fn = args.out_fn
# Examples:
# src_fn = "../data/wmt19_biomed_modified/align.tok.mark.ba-s"
# tgt_fn = "../data/wmt19_biomed_modified/align.tok.mark.ba.-t"
# out_fn = "../data/wmt19_biomed_modified/align_bleualign_zh_en.txt"
def extract_markers(sent):
    """Strip "|docN,M" alignment markers from a sentence.

    Returns (sentence_without_markers, doc_id, comma_joined_sent_ids).
    Emits a warning (and joins all ids) when the markers reference more
    than one document.
    """
    sent = sent.strip()
    markers = re.findall("\|\s{0,1}doc[0-9]+,[0-9]+", sent)
    for marker in markers:
        sent = sent.replace(marker, "")
    doc_ids = set(m.split(",")[0].replace("|", "").strip() for m in markers)
    sent_ids = ",".join(m.split(",")[1] for m in markers)
    if len(doc_ids) == 1:
        doc_ids = doc_ids.pop()
    else:
        warnings.warn("Doc should be unique.")
        doc_ids = ",".join(list(doc_ids))
    return sent, doc_ids, sent_ids
# Stream both token files in lockstep, strip the doc/sentence markers,
# and emit one tab-separated alignment record per sentence pair.
with open(src_fn, "r") as f1, open(tgt_fn, "r") as f2, \
    open(out_fn, "w+") as fout:
    n = 0
    for src_sent, tgt_sent in zip(f1, f2):
        n += 1
        src_sent, src_doc_ids, src_sent_ids = extract_markers(src_sent)
        tgt_sent, tgt_doc_ids, tgt_sent_ids = extract_markers(tgt_sent)
        # Both sides should come from the same document; if not, keep
        # both doc ids in the output record.
        if src_doc_ids != tgt_doc_ids:
            warnings.warn("Source doc ({}) and target doc ({}) "\
                "should be identical.".format(src_doc_ids, tgt_doc_ids))
            fout.write("{},{}\t{} <=> {}\t{}\t{}\t{}\n".format(src_doc_ids,
                tgt_doc_ids, src_sent_ids, tgt_sent_ids, "AUTO", src_sent,
                tgt_sent))
        else:
            fout.write("{}\t{} <=> {}\t{}\t{}\t{}\n".format(src_doc_ids,
                src_sent_ids, tgt_sent_ids, "AUTO", src_sent, tgt_sent))
<file_sep>import argparse
import os
import pandas as pd
pd.options.display.max_columns = 99
import numpy as np
from collections import defaultdict
# CLI: ground-truth alignment, raw sentence files, predictions, output path.
parser = argparse.ArgumentParser(description="Generate precision-recall "\
    "table for sentence alignments.")
parser.add_argument("--align_fn", type=str, help="Path to ground-truth "\
    "alignment file.")
parser.add_argument("--en_fn", type=str, help="Path to English sentences.")
parser.add_argument("--zh_fn", type=str, help="Path to Chinese sentences.")
parser.add_argument("--pred_fn", type=str, help="Path to prediction sentence.")
parser.add_argument("--out_fn", type=str, help="Path to output precision "\
    "recall table.")
args = parser.parse_args()
os.makedirs(os.path.dirname(args.out_fn), exist_ok=True)
# Example
# args = argparse.Namespace(align_fn="../data/wmt19_biomed_modified/align_validation_zh_en.txt",
# 	en_fn="../data/wmt19_biomed_modified/medline_zh2en_en.txt",
# 	zh_fn="../data/wmt19_biomed_modified/medline_zh2en_zh.txt",
# 	pred_fn="../data/wmt19_biomed_modified/align_bleualign_zh_en.txt",
# 	out_fn="../processed_data/evaluation/wmt19_biomed/evaluate/bleualign.pr")
def align_en_zh(align, en, zh):
    """Join sentence-id alignments with the actual en/zh sentence text.

    `align["align"]` holds "zh_ids <=> en_ids" strings where each side
    is a comma-separated list of 1-based sentence indices or "omitted".
    Returns a DataFrame with the concatenated sentence text per alignment.
    """
    align["zh"] = [x.split(" <=> ")[0] for x in align["align"]]
    align["en"] = [x.split(" <=> ")[1] for x in align["align"]]
    docs = align.doc.unique()
    alignment = defaultdict(list)
    for doc in docs:
        e = en[en.doc == doc]
        z = zh[zh.doc == doc]
        a = align[align.doc == doc]
        # Skip documents with no sentences on either side.
        if e.shape[0] == 0 or z.shape[0] == 0:
            continue
        for i, j, status in \
            zip(a["zh"], a["en"], a["status"]):
            zh_sent = ""
            en_sent = ""
            for v in i.split(","):
                if v != "omitted":
                    v = int(v) - 1  # ids are 1-based; iloc is 0-based
                    zh_sent += z["sent"].iloc[v]
            for w in j.split(","):
                if w != "omitted":
                    w = int(w) - 1
                    en_sent += e["sent"].iloc[w]
            alignment["doc"].append(doc)
            alignment["align"].append("{} <=> {}".format(i,j))
            alignment["status"].append(status)
            alignment["zh"].append(zh_sent)
            alignment["en"].append(en_sent)
    alignment = pd.DataFrame(alignment)
    return alignment
def read_data(args):
    """Load the alignment table (3- or 4-column) and, when sentence files
    are supplied, join them into sentence-level alignments.

    Returns (align, en, zh); en and zh are None when no sentence files
    were given.
    """
    # Probe the first rows just to count columns.
    header_probe = pd.read_table(args.align_fn, nrows=10)
    ncol = header_probe.shape[1]
    print(f"{ncol} columns detected in alignment file.")
    if ncol == 3:
        align = pd.read_table(args.align_fn, names=["doc", "align", "status"])
    elif ncol == 4:
        align = pd.read_table(args.align_fn, names=["pmid", "doc", "align", "status"])
    else:
        raise ValueError(f"Column = {ncol} has not been implemented.")
    en = zh = None
    if args.en_fn is not None and args.zh_fn is not None:
        en = pd.read_table(args.en_fn, names=["doc", "sent_id", "sent"])
        zh = pd.read_table(args.zh_fn, names=["doc", "sent_id", "sent"])
        align = align_en_zh(align, en, zh)
    return align, en, zh
def align_type(x):
    """Map each "src <=> tgt" alignment string to a "min - max" label.

    Each side is a comma-separated id list or "omitted" (length 0).
    Missing values pass through unchanged.
    """
    out = []
    for i in x:
        # pd.isna handles any NaN/None, unlike the original identity
        # test against the np.NaN singleton (an alias removed in
        # NumPy 2.0, and never identical to other NaN objects).
        if not isinstance(i, str) and pd.isna(i):
            out.append(i)
            continue
        src, tgt = i.split(" <=> ")
        src_len = 0 if src == "omitted" else len(src.split(","))
        tgt_len = 0 if tgt == "omitted" else len(tgt.split(","))
        out.append("{} - {}".format(min(src_len, tgt_len),
            max(src_len, tgt_len)))
    return out
def get_precision_recall(valid, pred):
    """Compute per-alignment-type precision/recall of pred vs valid.

    Alignments are matched on "doc:align" keys.  Types that raise
    (e.g. zero division when pred has no entries) are skipped with a
    message, mirroring the original behaviour.
    """
    types = valid["type"].unique()
    print(f"Alignment types: {types}", flush=True)

    def make_key(row):
        return ":".join([row["doc"], row["align"]])

    pr_table = defaultdict(list)
    for _type in types:
        try:
            truth_keys = valid[valid["type"] == _type].\
                apply(lambda r: make_key(r), axis=1).tolist()
            pred_keys = pred[pred["type"] == _type].\
                apply(lambda r: make_key(r), axis=1).tolist()
            true_pos = sum(k in pred_keys for k in truth_keys)
            false_neg = sum(k not in pred_keys for k in truth_keys)
            false_pos = sum(k not in truth_keys for k in pred_keys)
            precision = true_pos / (true_pos + false_pos)
            recall = true_pos / (true_pos + false_neg)
            pr_table["type"].append(_type)
            pr_table["precision"].append(precision)
            pr_table["recall"].append(recall)
        except:
            print(f"Type {_type} not found.")
    return pd.DataFrame(pr_table)
def main():
    """Evaluate predicted alignments against the validation set and
    write a precision/recall table to args.out_fn."""
    valid, en, zh = read_data(args)
    pred = pd.read_table(args.pred_fn,
        names=["doc", "align", "status", "zh", "en"])
    valid["type"] = align_type(valid["align"])
    pred["type"] = align_type(pred["align"])
    pr_table = get_precision_recall(valid, pred)
    pr_table.to_csv(args.out_fn, sep="\t", index=False)
# Script entry point.
if __name__ == "__main__":
    main()
<file_sep>in_dir=../processed_data/preprocess/tokenize/
moses_scripts=~/software/mosesdecoder/scripts/
out_dir=../processed_data/preprocess/truecase/
mkdir -p $out_dir
# Train the Moses truecaser on the concatenation of all English files.
cat $in_dir/*.filt.en > $out_dir/corpus.en
$moses_scripts/recaser/train-truecaser.perl \
	-model $out_dir/truecase-model.en -corpus $out_dir/corpus.en
# Apply the trained model to each English file individually.
for f in $in_dir/*.filt.en; do
	echo $f
	base=$(basename $f)
	$moses_scripts/recaser/truecase.perl < $f \
		> $out_dir/$base -model $out_dir/truecase-model.en
done
# Remove the model and concatenated corpus intermediates.
rm $out_dir/{truecase-model.en,corpus.en}
for f in $in_dir/*.filt.zh; do
echo $f
base=$(basename $f)
ln $f $out_dir/$base
done<file_sep>import os
import re
import glob
import sys
import pandas as pd
pd.set_option('display.max_columns', 999)
import matplotlib
import matplotlib.pyplot as plt
sys.path.append(".")
from utils.utils import read_article_urls
in_dir = "../processed_data/crawler/nejm/urls/"  # crawled URL tables
out_dir = "../processed_data/crawler/url_stat/"  # figure output
os.makedirs(out_dir, exist_ok=True)
# Mapping from article-id prefixes to display names for the type plot.
abbrev = {"cp": "Clinical Practice",
    "oa": "Original Article",
    "ra": "Review Article",
    "cpc": "Case Records",
    "sr": "Special Report",
    "ct": "Clinical Therapeutics",
    "jw.na": "Journal Watch",
    "clde": "Clinical Decisions",
    "cps": "Clinical Prob Solving",
    "p": "Perspective",
    "e": "Editorial",
    "cibr": "Clinical Implications\nof Basic Research",
    "icm": "Images in Clinical Med",
    "ms": "Medicine and Society",
    "c": "Correspondence",
    "sa": "Special Article",
    "x": "Correction",
    "hpr": "Health Policy Report"}
# Read article and urls:
articles = read_article_urls(in_dir)
articles = articles[articles["year"] != 2020] # Remove year 2020
# Plot article count by year:
year_count = articles.groupby("year").\
    agg(count=pd.NamedAgg("year", "count")).\
    reset_index()
fig, (ax1, ax2) = plt.subplots(2,1)
ax1.clear()
ax1.bar("year", "count", data=year_count)
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.set_xticks(ticks=year_count["year"])
ax1.set_xticklabels(labels=year_count["year"])
ax1.set_ylabel("No. Articles")
# Panel label "A" for the figure caption.
ax1.text(-0.1, 1.15, "A", transform=ax1.transAxes, fontsize=16, fontweight="bold", va="top", ha="right")
# Plot count by article type:
# Type abbreviation = article id with digits and '%' stripped.
articles["type_abbr"] = articles["id"].apply(lambda x: re.sub("[0-9%]+", "", x))
articles["type"] = articles["type_abbr"].apply(lambda x: abbrev[x])
type_count = articles.groupby("type").\
    agg(count=pd.NamedAgg("type", "count")).\
    reset_index()
type_count.sort_values(by="count", inplace=True, ascending=False)
ax2.clear()
ax2.bar("type", "count", data=type_count)
ax2.set_xticklabels(labels=type_count["type"], rotation=90, linespacing=0.95)
ax2.semilogy()
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.set_ylabel("No. Articles")
ax2.text(-0.1, 1.15, "B", transform=ax2.transAxes, fontsize=16, fontweight="bold", va="top", ha="right")
# Save figure:
fig.set_size_inches(5,5)
fig.tight_layout()
fig.savefig(f"{out_dir}/url_statistics.pdf")<file_sep>#!/bin/bash
# Must be run on GPU nodes:
ONMT=/mnt/home/boxiang/projects/OpenNMT-py
WMT18=../processed_data/translation/wmt18/train/ # Path to the WMT18 baseline model.
NEJM=../processed_data/translation/nejm/finetune/ # Path to model fine-tuned on NEJM.
BPE_DIR=../processed_data/translation/wmt18/train/data/
VALID_DATA=../processed_data/split_data/split_train_test/ # Path to NEJM valid set.
TEST_SRC=$VALID_DATA/nejm.test.zh
TEST_TGT=$VALID_DATA/nejm.test.en
# Apply the WMT18 BPE codes to both sides of the test set.
$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.zh < $TEST_SRC > $NEJM/test/test.zh
$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.en < $TEST_TGT > $NEJM/test/test.en
src=$NEJM/test/test.zh
tgt=$NEJM/test/test.en
# Testing Chinese to English translation:
# One baseline model plus six fine-tuned models (varying NEJM data size).
models=($WMT18/models/zh2en_step_390000.pt \
	$NEJM/models/4000/zh2en_step_400000.pt \
	$NEJM/models/8000/zh2en_step_400000.pt \
	$NEJM/models/16000/zh2en_step_400000.pt \
	$NEJM/models/32000/zh2en_step_400000.pt \
	$NEJM/models/64000/zh2en_step_400000.pt \
	$NEJM/models/93303/zh2en_step_400000.pt)
translations=(wmt18.zh2en \
	nejm.4000.zh2en \
	nejm.8000.zh2en \
	nejm.16000.zh2en \
	nejm.32000.zh2en \
	nejm.64000.zh2en \
	nejm.93303.zh2en)
for i in {0..6}; do
	model=${models[$i]}
	translation=${translations[$i]}
	echo "Translate $translation"
	python $ONMT/translate.py \
		-batch_size 1 \
		-model $model \
		-src $src \
		-output $NEJM/test/$translation \
		-replace_unk -verbose \
		-gpu 0 > $NEJM/test/${translation}.log
	echo "BPE decoding/detokenising target to match with references"
	mv $NEJM/test/$translation{,.bpe}
	# Strip the "@@ " BPE continuation markers before scoring.
	cat $NEJM/test/$translation.bpe | sed -E 's/(@@ )|(@@ ?$)//g' > $NEJM/test/$translation
	echo $NEJM/test/$translation
	$ONMT/tools/multi-bleu.perl $TEST_TGT < $NEJM/test/$translation > $NEJM/test/$translation.tc.bleu
done
# Testing English to Chinese translation:
models=($WMT18/models/en2zh_step_500000.pt \
	$NEJM/models/4000/en2zh_step_510000.pt \
	$NEJM/models/8000/en2zh_step_510000.pt \
	$NEJM/models/16000/en2zh_step_510000.pt \
	$NEJM/models/32000/en2zh_step_510000.pt \
	$NEJM/models/64000/en2zh_step_510000.pt \
	$NEJM/models/93303/en2zh_step_510000.pt)
translations=(wmt18.en2zh \
	nejm.4000.en2zh \
	nejm.8000.en2zh \
	nejm.16000.en2zh \
	nejm.32000.en2zh \
	nejm.64000.en2zh \
	nejm.93303.en2zh)
for i in {0..6}; do
	model=${models[$i]}
	translation=${translations[$i]}
	echo "Translate $translation"
	python $ONMT/translate.py \
		-batch_size 1 \
		-model $model \
		-src $tgt \
		-output $NEJM/test/$translation \
		-replace_unk -verbose \
		-gpu 0 > $NEJM/test/${translation}.log
	echo "BPE decoding/detokenising target to match with references"
	mv $NEJM/test/$translation{,.bpe}
	cat $NEJM/test/$translation.bpe | sed -E 's/(@@ )|(@@ ?$)//g' > $NEJM/test/$translation
	echo $NEJM/test/$translation
	$ONMT/tools/multi-bleu.perl $TEST_SRC < $NEJM/test/$translation > $NEJM/test/$translation.tc.bleu
done
<file_sep>import os
import re
import sys
import glob
import pandas as pd
sys.path.append(".")
from utils.utils import Article, get_nltk_sent_tokenizer,\
RegexSentenceTokenizer
article_dir = "../processed_data/preprocess/normalize/"  # normalized article text
sentence_dir = "../processed_data/preprocess/sentences/punkt/"  # tokenized output
if not os.path.exists(sentence_dir):
    os.makedirs(sentence_dir)
def save_sentences(out_fn, article):
	"""Write each sentence of ``article`` to ``out_fn``, one per line.

	out_fn: path of the output text file (overwritten if it exists).
	article: object exposing a ``sentences`` iterable of strings.
	"""
	# Removed unused locals: `ns` and the enumerate() index were never read.
	with open(out_fn, "w") as f:
		for sent in article.sentences:
			f.write(sent + "\n")
# English:
article_paths=glob.glob("{}/*.filt.en".format(article_dir))
# The punkt model is trained on the article corpus itself; the regex
# splitter additionally breaks sentences glued to citation numbers.
nltk_sent_tokenizer = get_nltk_sent_tokenizer(article_paths, lang="en")
regex_sent_tokenizer = RegexSentenceTokenizer(regex="[^0-9]\.[0-9]{1,2}[0-9,-]*?[ \n]")
num_sents = 0
for path in article_paths:
	print("Article: {}".format(path), flush=True)
	article = Article(path=path,
		sent_tokenizers=[nltk_sent_tokenizer, regex_sent_tokenizer],
		lang="en")
	out_fn = "{}/{}".format(sentence_dir, \
		os.path.basename(path))
	save_sentences(out_fn, article)
	num_sents += len(article.sentences)
print("Total sentences: {}".format(num_sents))
# Total sentences: 135245
# Chinese:
article_paths=glob.glob("{}/*.filt.zh".format(article_dir))
# Chinese text is split purely on sentence-final punctuation (。!?).
regex_sent_tokenizer = RegexSentenceTokenizer(regex=u"[^!?。]+[!?。]?[“]*?")
num_sents = 0
for path in article_paths:
	print("Article: {}".format(path), flush=True)
	article = Article(path=path,
		sent_tokenizers=[regex_sent_tokenizer],
		lang="zh")
	out_fn = "{}/{}".format(sentence_dir, \
		os.path.basename(path))
	save_sentences(out_fn, article)
	num_sents += len(article.sentences)
print("Total sentences: {}".format(num_sents))
# Total sentences: 135225
<file_sep>import os
import sys
sys.path.append(".")
from collections import defaultdict
from utils.utils import read_article_urls
url_dir = "../processed_data/crawler/nejm/urls/"
align_dir = "../processed_data/clean/clean/"
out_dir = "../processed_data/split_data/split_train_test/"
os.makedirs(out_dir, exist_ok=True)
def save_aligned_sent(prefix, docs, align):
	"""Dump the zh/en sides of aligned sentences for the given document
	ids into ``<prefix>.zh`` and ``<prefix>.en`` — one stripped sentence
	per line, documents written in the order given by ``docs``."""
	with open(f"{prefix}.zh", "w") as zh_out:
		with open(f"{prefix}.en", "w") as en_out:
			for doc_id in docs:
				subset = align[align["id"] == doc_id]
				zh_out.writelines(s.strip() + "\n" for s in subset["zh"])
				en_out.writelines(s.strip() + "\n" for s in subset["en"])
print("Reading articles...")
article_urls = read_article_urls(url_dir)
# NOTE(review): `pd` (pandas) must be imported at the top of this file —
# verify the import block.
df = defaultdict(list)
with open(f"{align_dir}/all.rm_dup.txt", "r") as f:
	for line in f:
		# Columns: article id, sentence number, zh sentence, en sentence.
		split_line = line.split("\t")
		df["id"].append(split_line[0])
		df["sent"].append(split_line[1])
		df["zh"].append(split_line[2])
		df["en"].append(split_line[3])
align = pd.DataFrame(df)
print("Counting number of sentences in each article...")
num_sent = align.groupby("id").agg(num_sent=pd.NamedAgg("id","count"))
print("Sorting article by year and month...")
num_sent = pd.merge(article_urls[["year", "month", "id"]], num_sent, on="id")
num_sent = num_sent.sort_values(["year", "month"], ascending=False)
print("Selecting sentences for the test set...")
# Walk down the articles (newest first) until ~2000 sentences accumulate;
# whole articles are kept together so test/dev never split a document.
num_sent["cumsum"] = num_sent["num_sent"].cumsum()
test_sent = num_sent[num_sent["cumsum"] >= 2000]["cumsum"].iloc[0]
test_docs = num_sent[num_sent["cumsum"] <= test_sent]["id"]
# 39 articles
# 2102 sentences
print(f"Writing {test_sent} test sentences...")
save_aligned_sent(f"{out_dir}/nejm.test", test_docs, align)
print("Selecting sentences for the dev set...")
# The next ~2000 sentences (after the test block) form the dev set.
dev_sent = num_sent[num_sent["cumsum"] >= 2000 + test_sent]["cumsum"].iloc[0]
dev_docs = num_sent[(num_sent["cumsum"] <= dev_sent) & (num_sent["cumsum"] > test_sent)]["id"]
# 40 articles
# 2036 sentences
print(f"Writing {dev_sent-test_sent} dev sentences...")
save_aligned_sent(f"{out_dir}/nejm.dev", dev_docs, align)
print(f"Writing training sentences...")
# Everything older than the dev block is training data.
train_docs = num_sent[num_sent["cumsum"] > dev_sent]["id"]
save_aligned_sent(f"{out_dir}/nejm.train", train_docs, align)<file_sep># This will calculate the precision and recall for the following algorithms:
# ba: Bleualign
# ba2: Bleualign with bidirectional translation
# gc: Gale-Church
# moore: Moore's IBM 1 model.
# NOTE: only ba, gc and moore are iterated below; ba2 is not in the loop.
for algo in ba gc moore; do
	# This will generate src <=> tgt alignment.
	python3 evaluation/wmt19_biomed/gen_align_file.py \
		--src_fn ../data/wmt19_biomed_modified/align.tok.mark.${algo}-s \
		--tgt_fn ../data/wmt19_biomed_modified/align.tok.mark.${algo}-t \
		--out_fn ../data/wmt19_biomed_modified/align_${algo}_zh_en.txt
	# Evaluate algorithm:
	# Compare predicted alignments against the manually validated gold file;
	# precision/recall written to ${algo}.pr.
	python3 evaluation/wmt19_biomed/evaluate.py \
		--align_fn ../data/wmt19_biomed_modified/align_validation_zh_en.txt \
		--en_fn ../data/wmt19_biomed_modified/medline_zh2en_en.txt \
		--zh_fn ../data/wmt19_biomed_modified/medline_zh2en_zh.txt \
		--pred_fn ../data/wmt19_biomed_modified/align_${algo}_zh_en.txt \
		--out_fn ../processed_data/evaluation/wmt19_biomed/evaluate/${algo}.pr
done<file_sep>data=../processed_data/preprocess/sentences/eserix/
moses_scripts=~/software/mosesdecoder/scripts/
out_dir=../processed_data/preprocess/tokenize/
mkdir -p $out_dir
src=zh
tgt=en
# Tokenize English sentences with the Moses tokenizer (-a = aggressive).
for article in `ls $data/*.$tgt`; do
	echo $article
	base=$(basename $article)
	cat $article | \
		$moses_scripts/tokenizer/tokenizer.perl -a -l $tgt \
		> $out_dir/$base
done
# Segment Chinese sentences into space-delimited tokens with jieba.
for article in `ls $data/*.$src`; do
	echo $article
	base=$(basename $article)
	python3 -m jieba -d ' ' < $article \
		> $out_dir/$base
done<file_sep>from collections import defaultdict
import pandas as pd
# OpenNMT `translate.py -verbose` logs for the fine-tuned NEJM model and
# the WMT18 baseline, in both translation directions.
nejm_zh2en = "../processed_data/translation/nejm/finetune/test/nejm.93303.zh2en.log"
wmt_zh2en = "../processed_data/translation/nejm/finetune/test/wmt18.zh2en.log"
nejm_en2zh = "../processed_data/translation/nejm/finetune/test/nejm.93303.en2zh.log"
wmt_en2zh = "../processed_data/translation/nejm/finetune/test/wmt18.en2zh.log"
def read_openNMT_translation_log(fn, direction):
	"""Parse an OpenNMT ``translate.py -verbose`` log into a DataFrame.

	fn: path to the log file.
	direction: "zh2en" or "en2zh"; controls which side is source vs
	prediction and how SENT lines are detokenized.
	Returns a DataFrame with the source sentence, predicted translation,
	prediction "score", and the log line "index" of each SENT record.
	"""
	assert direction in ["zh2en", "en2zh"], \
		"direction must be zh2en or en2zh."
	records = defaultdict(list)
	src_lang, tgt_lang = ("zh", "en") if direction == "zh2en" else ("en", "zh")
	with open(fn, "r") as log:
		for line_no, raw in enumerate(log):
			if raw.startswith("SENT"):
				src = raw.strip().split(":")[1]
				if direction == "zh2en":
					# Chinese tokens are glued back together without spaces.
					src = src.replace("['", "").replace("']", "").\
						replace("', '", "").replace("@@", "")
				else:
					# English tokens joined with spaces; BPE marks dropped.
					src = src.replace("['", "").replace("']", "").\
						replace("', '", " ").replace("@@ ", "")
				records[src_lang].append(src)
				records["index"].append(line_no)
			elif raw.startswith("PRED SCORE"):
				records["score"].append(float(raw.strip().split(":")[1].strip()))
			elif raw.startswith("PRED AVG SCORE"):
				# Summary line at the end of the log — not per-sentence.
				pass
			elif raw.startswith("PRED"):
				records[tgt_lang].append(raw.strip().split(":")[1].replace("@@ ", ""))
	return pd.DataFrame(records)
nejm = read_openNMT_translation_log(nejm_zh2en, direction="zh2en")
wmt = read_openNMT_translation_log(wmt_zh2en, direction="zh2en")
# Join the two systems on log line index and rank test sentences by how
# much the fine-tuned model out-scores the WMT18 baseline.
merged = pd.merge(nejm, wmt, on="index", suffixes=["_nejm", "_wmt"])
merged["score_diff"] = merged.apply(lambda x: x["score_nejm"] - x["score_wmt"], axis=1)
merged = merged.sort_values("score_diff", ascending=False)
# In [92]: merged.iloc[8]["zh_nejm"]
# Out[92]: ' 患者接受铂类-紫杉类药物化疗+贝伐珠单抗一线治疗后,本研究要求其不能有病变迹象,或者在治疗后达到临床完全或部分缓解(定义参见表1).'
# In [93]: merged.iloc[8]["en_nejm"]
# Out[93]: ' patients were required to have no evidence of disease or to have a clinical complete or partial response after treatment after first @-@ line platinum @-@ taxane chemotherapy plus bevacizumab ( as defined in Table 1 ) .'
# In [94]: merged.iloc[8]["en_wmt"]
# Out[94]: ' after Pt @-@ Pseudophyllus drug chemotherapy + Bavaris mono @-@ repellent first @-@ line treatment , the study required that the patient should not show signs of lesion or complete or partial clinical relief after treatment ( see table 1 for definition ) .'
# Repeat the comparison in the en->zh direction.
nejm = read_openNMT_translation_log(nejm_en2zh, direction="en2zh")
wmt = read_openNMT_translation_log(wmt_en2zh, direction="en2zh")
merged = pd.merge(nejm, wmt, on="index", suffixes=["_nejm", "_wmt"])
merged["score_diff"] = merged.apply(lambda x: x["score_nejm"] - x["score_wmt"], axis=1)
merged = merged.sort_values("score_diff", ascending=False)
# Inspect a high-difference example interactively:
merged.iloc[20]["en_nejm"]
merged.iloc[20]["zh_nejm"]
merged.iloc[20]["zh_wmt"]<file_sep>bleualign=~/projects/Bleualign/bleualign.py
data=/mnt/scratch/boxiang/projects/med_translation/processed_data/evaluation/nejm/align/bleualign/input/
out_dir=/mnt/scratch/boxiang/projects/med_translation/processed_data/evaluation/nejm/align/bleualign/align/
mkdir -p $out_dir
# Bleualign
# Sweep the sentence-filter threshold 0..100; Bleualign writes *align-s
# (source) and *align-t (target) next to the input, so results are moved
# into a per-threshold directory and concatenated afterwards.
for threshold in `seq 0 5 100`; do
	mkdir -p $out_dir/one_sided/$threshold/
	for doc in `ls $data/doc*.zh.mark`; do
		$bleualign --factored \
			-s $doc \
			-t ${doc/.zh./.en.} \
			--srctotarget ${doc/.zh./.zh.2en.} \
			--filter sentences \
			--filterthreshold $threshold \
			--filterlang \
			--verbosity 1 \
			-o ${doc/.zh.mark/.align}
	done
	mv $data/*align-{s,t} $out_dir/one_sided/$threshold/
	cat $out_dir/one_sided/$threshold/doc*.align-s > \
		$out_dir/one_sided/$threshold/align.zh
	cat $out_dir/one_sided/$threshold/doc*.align-t > \
		$out_dir/one_sided/$threshold/align.en
done
# cat $data/doc*.ba-s > $out_dir/ba/align.ba-s
# cat $data/doc*.ba-t > $out_dir/ba/align.ba-t
# rm $data/doc*.ba-{s,t}
# Bleualign (both directions):
# Same sweep, but also supplies the en->zh translation (--targettosrc).
for threshold in `seq 0 5 100`; do
	mkdir -p $out_dir/two_sided/$threshold/
	for doc in `ls $data/doc*.zh.mark`; do
		$bleualign --factored \
			-s $doc \
			-t ${doc/.zh/.en} \
			--srctotarget ${doc/.zh./.zh.2en.} \
			--targettosrc ${doc/.zh./.en.2zh.} \
			--filter sentences \
			--filterthreshold $threshold \
			--filterlang \
			--verbosity 1 \
			-o ${doc/.zh.mark/.align}
	done
	mv $data/*align-{s,t} $out_dir/two_sided/$threshold/
	cat $out_dir/two_sided/$threshold/doc*.align-s > \
		$out_dir/two_sided/$threshold/align.zh
	cat $out_dir/two_sided/$threshold/doc*.align-t > \
		$out_dir/two_sided/$threshold/align.en
done
# Gale-Church:
# Length-based alignment baseline (no translations; --srctotarget -).
for threshold in `seq 0 5 100`; do
	mkdir -p $out_dir/gale_church/$threshold/
	for doc in `ls $data/doc*.zh.mark`; do
		$bleualign --factored \
			-s $doc \
			-t ${doc/.zh/.en} \
			--srctotarget - \
			--galechurch \
			--filter sentences \
			--filterthreshold $threshold \
			--filterlang \
			--verbosity 1 \
			-o ${doc/.zh.mark/.align}
	done
	mv $data/*align-{s,t} $out_dir/gale_church/$threshold/
	rm $data/*bad-{s,t}
	cat $out_dir/gale_church/$threshold/doc*.align-s > \
		$out_dir/gale_church/$threshold/align.zh
	cat $out_dir/gale_church/$threshold/doc*.align-t > \
		$out_dir/gale_church/$threshold/align.en
done
import os
import matplotlib
matplotlib.use('tkagg')
import matplotlib.pyplot as plt
import seaborn as sns
import glob
import re
from collections import defaultdict
denovo_xfmr_dir = "../processed_data/translation/nejm/train_denovo/test/"
finetune_xfmr_dir = "../processed_data/translation/nejm/finetune/test/"
denovo_lstm_dir = "../processed_data/translation/nejm/train_denovo_rnn/test/"
finetune_lstm_dir = "../processed_data/translation/nejm/finetune_rnn/test/"
out_dir = "../processed_data/translation/nejm/plot_bleu/"
os.makedirs(out_dir, exist_ok=True)
def get_plotting_data(denovo_dir, finetune_dir):
data = ["wmt18", "nejm.4000", "nejm.8000", "nejm.16000", \
"nejm.32000", "nejm.64000", "nejm.93303"]
direction = ["zh2en", "en2zh"]
container = defaultdict(list)
for h, in_dir in [("de novo", denovo_dir), ("finetune", finetune_dir)]:
for i,d in enumerate(data):
for j in direction:
fn = f"{in_dir}/{d}.{j}.tc.bleu"
try:
with open(fn, "r") as f:
line = f.readlines()[0].strip()
bleu = re.search("BLEU = [0-9\.]+", line).group(0).split("=")[1].strip()
bleu = float(bleu)
container["bleu"].append(bleu)
container["data_ord"].append(i)
container["data"].append(d)
container["direction"].append(j)
container["train"].append(h)
except:
print(f"{fn} does not exist.")
bleu = pd.DataFrame(container)
zeros = pd.DataFrame({"bleu":[0, 0], "data_ord": [0, 0], "data": ["nejm.0", "nejm.0"], "direction":["zh2en", "en2zh"], "train": ["de novo", "de novo"]})
bleu = pd.concat([zeros, bleu])
ord2num = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 5.46}
bleu["x"] = bleu["data_ord"].apply(lambda x: ord2num[x])
return bleu
bleu_xfmr = get_plotting_data(denovo_xfmr_dir, finetune_xfmr_dir)
bleu_lstm = get_plotting_data(denovo_lstm_dir, finetune_lstm_dir)
bleu_xfmr["model"] = "Transformer"
bleu_lstm["model"] = "LSTM"
bleu = pd.concat([bleu_lstm, bleu_xfmr])
plt.ion()
g = sns.FacetGrid(bleu, col="model", height=4, aspect=1, legend_out=False)
g.map_dataframe(sns.lineplot, x="x", y="bleu", hue="direction", legend="brief", style="train", markers=["o","o"], dashes=[(2,1),""])
g.add_legend()
g.set_titles(col_template="{col_name}")
g.set_axis_labels("In-Domain Sentence Pairs", "1-ref BLEU")
g.set(xticks=[0, 1, 2, 3, 4, 5, 5.46])
g.set(xticklabels=["0", "4000", "8000", "16000", "32000", "64000", ""])
g.tight_layout()
g.savefig(f"{out_dir}/bleu.pdf")
plt.close()<file_sep>import pandas as pd
in_file = "../processed_data/clean/clean/all.rm_dup.txt"
# quoting=3 (QUOTE_NONE): quote characters in the text are literal.
corpus = pd.read_table(in_file, sep="\t", quoting=3, header=None, names=["article", "sentence", "zh", "en"])
# Number of articles
len(corpus["article"].unique())
# 1966
# Number of sentences:
corpus.shape[0]
# 97441
def get_tokens(sentences: list) -> list:
	'''
	Flatten a list of space-delimited sentences into a single token list.
	INPUT ARGS
	sentences: [List] a list of sentences
	'''
	# Note: split(" ") (not split()) is used on purpose, so runs of
	# spaces yield empty-string tokens exactly as before.
	return [token
		for sentence in sentences
		for token in sentence.split(" ")]
en_tokens = get_tokens(corpus["en"].tolist())
# English
# Number of unique tokens
len(set(en_tokens))
# 55,673
# Number of tokens:
len(en_tokens)
# 3,028,434
# Chinese
zh_tokens = get_tokens(corpus["zh"].tolist())
# Number of unique tokens
len(set(zh_tokens))
# 46,700
# Number of tokens:
len(zh_tokens)
# 2,916,779
def get_avg_length(sentences: list) -> float:
	'''
	Return the mean number of space-delimited tokens per sentence.
	INPUT ARGS:
	sentences: [List] a list of sentences
	Returns 0.0 for an empty input (the original raised
	ZeroDivisionError on an empty list).
	'''
	if not sentences:
		return 0.0
	sentence_lengths = [len(sentence.split(" ")) for sentence in sentences]
	return sum(sentence_lengths) / len(sentence_lengths)
def test_get_avg_length():
	# Sanity check: 3+2+5 tokens over 3 sentences.
	sentences = ["a b c", "d e", "s d k l m"]
	avg_length = get_avg_length(sentences)
	assert avg_length == 10/3
# English sentence length
get_avg_length(corpus["en"].tolist())
# 31.080
# Chinese sentence length
get_avg_length(corpus["zh"].tolist())
# 29.934<file_sep># Must run on asimovbld-1
eserix=/Users/boxiang/Documents/tools/eserix/build/bin/eserix
in_dir=../processed_data/preprocess/normalize/
out_dir=../processed_data/preprocess/sentences/eserix/
rule_fn=/Users/boxiang/Documents/tools/eserix/srx/rules.srx
mkdir -p $out_dir
# Split every normalized article into one-sentence-per-line files using
# eserix with its SRX segmentation rules.
# FIX: removed `mkdir -p $out_dir/$sub_dir` — $sub_dir is never defined
# anywhere in this script, so the command only re-created $out_dir.
for lang in en zh; do
	for article in `ls $in_dir/*.filt.$lang`; do
		echo $article
		base=$(basename $article)
		out_fn=$out_dir/$base
		echo Language: $lang
		echo Output: $out_fn
		cat $article | \
			$eserix -t -l $lang \
			-r $rule_fn > \
			$out_fn
	done
done<file_sep>import os
import re
import glob
import pandas as pd
pd.set_option('display.max_columns', 999)
import matplotlib
import matplotlib.pyplot as plt
# Compare the re-crawled ("new") URL lists with an earlier crawl ("old")
# to confirm no articles were lost.
new_dir = "../processed_data/crawler/nejm/urls/"
old_dir = "../processed_data/crawler/nejm/urls_old/"
# Read new urls:
container = []
new_files = glob.glob(f"{new_dir}/*/*.txt")
for fn in new_files:
	print(f"Filename: {fn}")
	container.append(pd.read_table(fn, header=None))
new = pd.concat(container)
new.columns = ["year", "month", \
	"id", "zh_title", "en_title", \
	"zh_url", "en_url"]
new = new[new["year"] != 2020] # Remove year 2020
print(f"Total number of new articles: {new.shape[0]}")
# Read old urls:
container = []
old_files = glob.glob(f"{old_dir}/*/*.txt")
for fn in old_files:
	print(f"Filename: {fn}")
	container.append(pd.read_table(fn, header=None))
old = pd.concat(container)
old.columns = ["zh_title", "zh_url", "en_url"]
print(f"Total number of old articles: {old.shape[0]}")
old["zh_url"].unique().shape
old["en_url"].unique().shape
# Which new articles are missing from the old crawl?
new["in_old"] = new["zh_url"].apply(lambda x: x in old["zh_url"].tolist())
new[new["in_old"] == False][["year","month"]].drop_duplicates()
#     year month
# 0   2019    12
# 0   2019    10
# 0   2019    11
# The new urls missing from the old are all from 10, 11, 12 of 2019.
old["in_new"] = old["zh_url"].apply(lambda x: x in new["zh_url"].tolist())
old[old["in_new"] == False]
# The old urls missing from the new are all "Quick Take", or videos, which
# do not have any text and should not be crawled at all.
# For example: https://www.nejmqianyan.cn/article/YXQYdo005239
import re
import sys
import glob
from collections import defaultdict
import pandas as pd
pd.set_option('display.max_columns', 999)
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
sys.path.append(".")
from utils.utils import read_article_urls, abbrev
urls_dir = "../processed_data/crawler/nejm/urls/"
in_dir = "../processed_data/crawler/nejm/articles/"
out_dir = "../processed_data/crawler/article_stat/"
os.makedirs(out_dir, exist_ok=True)
# Functions
def get_article_length(in_dir, article_urls, status):
container = defaultdict(lambda: \
{"time": None,
"zh": {"text": None, "len": None},
"en": {"text": None, "len": None},
"zh_m_en": None})
for index, row in article_urls.iterrows():
for lang in ["zh", "en"]:
year = row["year"]
month = row["month"]
article_id = row["id"]
fn = f"{in_dir}/{year}/{month:02}/{article_id}.{status}.{lang}"
print(f"path: {fn}")
try:
with open(fn, "r") as f: text = f.readlines()
length = len(text)
container[article_id]["time"] = (int(year),int(month))
container[article_id][lang]["text"] = text
container[article_id][lang]["len"] = length
if container[article_id]["zh"]["len"] != None and \
container[article_id]["en"]["len"] != None:
container[article_id]["zh_m_en"] = \
container[article_id]["zh"]["len"] - \
container[article_id]["en"]["len"]
except:
print("Article not found.")
article_stat = []
for i, (k, v) in enumerate(container.items()):
article_stat.append(pd.DataFrame({"id": k, "year": \
v["time"][0], "month": v["time"][1], \
"zh_len": v["zh"]["len"], "en_len": v["en"]["len"], \
"zh_m_en": v["zh_m_en"]}, index=[i]))
article_stat = pd.concat(article_stat)
article_stat["type_abbr"] = article_stat["id"].apply(lambda x: re.sub("[0-9%]+", "", x))
article_stat["status"] = status
try:
article_stat["abs_diff"] = article_stat["zh_m_en"].apply(lambda x: abs(x))
except TypeError:
print("NaN found in zh_m_en.")
return article_stat
# BUGFIX: p1/p2/p3 previously referenced `full_articles`/`filt_articles`
# as globals, but those names were only ever bound as locals of main(),
# so running this script raised NameError. The DataFrames are now passed
# explicitly as parameters.
def p1(full_articles, filt_articles):
	"""Bar plot: mean absolute zh-en paragraph-count difference per
	article type, before vs after filtering."""
	# Make plot data:
	plot_data = pd.concat([full_articles, filt_articles])
	plot_data["type"] = plot_data["type_abbr"].apply(lambda x: abbrev[x])
	plot_data = plot_data.groupby(["type", "status"]).\
		agg(mean_diff=pd.NamedAgg("abs_diff", "mean")).reset_index()
	plot_data["Filter"] = plot_data["status"].\
		apply(lambda x: "Before" if x == "full" else "After")
	# Order bars by the pre-filter difference, largest first.
	order = plot_data[plot_data["status"]=="full"].\
		sort_values("mean_diff", ascending=False)["type"].tolist()
	# Plot:
	fig, ax = plt.subplots(1,1)
	ax.clear()
	sns.barplot(x="type", y="mean_diff", hue="Filter", \
		data=plot_data, order=order)
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	ax.set_xticklabels(labels=plot_data["type"], rotation=90, linespacing=0.95)
	ax.set_xlabel(None)
	ax.set_ylabel("Mean Abs Diff in \n# of Paragraphs")
	fig.set_size_inches(5,3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/length_difference.pdf")
def p2(full_articles, filt_articles):
	"""Side-by-side scatter of zh vs en paragraph counts, pre/post filter."""
	fig, (ax1, ax2) = plt.subplots(1,2)
	ax1.scatter(x="zh_len", y="en_len", data=full_articles)
	ax2.scatter(x="zh_len", y="en_len", data=filt_articles)
	xlim = ylim = ax1.get_ylim()
	ax1.plot(xlim, ylim, color="red", linestyle="dashed")
	xlim = ylim = ax2.get_ylim()
	ax2.plot(xlim, ylim, color="red", linestyle="dashed")
	fig.text(0.5, 0.01, "No. Chinese Paragraphs", ha="center")
	fig.text(0.0, 0.5, "No. English Paragraphs", va="center", rotation="vertical")
	ax1.set_title("Pre-filter")
	ax2.set_title("Post-filter")
	fig.set_size_inches(5,2.7)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/length_comparison.pdf")
def p3(full_articles, filt_articles):
	"""Single scatter overlaying pre- and post-filter paragraph counts."""
	fig, ax = plt.subplots(1,1)
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	ax.scatter(x="zh_len", y="en_len", data=full_articles, color="#7f7f7f")
	ax.scatter(x="zh_len", y="en_len", data=filt_articles, color="#1f77b4")
	xlim = ylim = ax.get_ylim()
	ax.plot(xlim, ylim, color="red", linestyle="dashed")
	ax.legend(labels=["Diagonal", "Pre-filter", "Post-filter"])
	ax.set_xlabel("No. Chinese Paragraphs")
	ax.set_ylabel("No. English Paragraphs")
	fig.set_size_inches(4,4)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/length_comparison_overlay.pdf")
def main():
	"""Compute paragraph-count statistics and render all three figures."""
	# Read article and urls:
	article_urls = read_article_urls(urls_dir)
	article_urls = article_urls[article_urls["year"] != 2020] # Remove year 2020
	# 1973 articles
	full_articles = get_article_length(in_dir, article_urls, "full")
	filt_articles = get_article_length(in_dir, article_urls, "filt")
	full_articles["abs_diff"].describe()
	# count    1973.000000
	# mean       11.831221
	# std        19.538525
	# min         0.000000
	# 25%         1.000000
	# 50%         1.000000
	# 75%        19.000000
	# max       130.000000
	filt_articles["abs_diff"].describe()
	# count    1973.000000
	# mean        0.274709
	# std         0.638960
	# min         0.000000
	# 25%         0.000000
	# 50%         0.000000
	# 75%         0.000000
	# max         5.000000
	p1(full_articles, filt_articles)
	p2(full_articles, filt_articles)
	p3(full_articles, filt_articles)
if __name__ == "__main__":
	main()
<file_sep>import glob
import os
in_dir = "../processed_data/preprocess/truecase/"
train_en_fn = "../processed_data/split_data/split_train_test/nejm.train.en"
train_zh_fn = "../processed_data/split_data/split_train_test/nejm.train.zh"
os.makedirs("../processed_data/open_access/open_access/", exist_ok=True)
open_access_en_fn = "../processed_data/open_access/open_access/nejm.train.en"
open_access_zh_fn = "../processed_data/open_access/open_access/nejm.train.zh"
open_access = set()
for fn in glob.glob(f"{in_dir}/jw*.en"):
print(fn)
with open(fn, "r") as f:
for line in f:
open_access.add(line)
for fn in glob.glob(f"{in_dir}/oa*.en"):
print(fn)
with open(fn, "r") as f:
for line in f:
open_access.add(line)
with open(train_en_fn, "r") as fen_in, open(train_zh_fn, "r") as fzh_in, \
open(open_access_en_fn, "w") as fen_out, open(open_access_zh_fn, "w") as fzh_out:
while True:
en = fen_in.readline()
zh = fzh_in.readline()
if en == "": break
if en in open_access:
fen_out.write(en)
fzh_out.write(zh)
<file_sep>import re
import sys
sys.path.append(".")
from utils.utils import read_article_urls
# Crawled article locations; this script strips non-translatable content.
url_dir = "../processed_data/crawler/nejm/urls/"
article_dir = "../processed_data/crawler/nejm/articles/"
def read_article(fn):
	"""Return the contents of file ``fn`` as a list of lines
	(trailing newlines preserved)."""
	with open(fn, "r") as handle:
		return handle.readlines()
# Stitch two or more sentences into one
# Why? Because sometime a single sentence
# are broken into muliple piece on the website.
def stitch(article, lang):
	"""Re-join paragraph fragments that the website split across lines.

	article: list of lines (with trailing newlines).
	lang: "zh" or "en" — each language has its own breakage patterns.
	Works by deleting the newline at the end of the fragment's
	predecessor, then re-splitting the joined text on "\n".
	NOTE(review): when a pattern matches at i == 0, article[i-1]
	addresses the LAST element (Python negative indexing) — confirm this
	never occurs in practice.
	"""
	if lang == "zh":
		for i, _ in enumerate(article):
			# A line with only numbers (i.e citation)
			if re.fullmatch("^[0-9,-]+\n$", article[i]):
				if article[i].endswith("\n"):
					article[i] = article[i].replace("\n", "")
				if article[i-1].endswith("\n"):
					article[i-1] = article[i-1].replace("\n", "")
			# A line with open a period
			if re.fullmatch("^。$", article[i]):
				if article[i-1].endswith("\n"):
					article[i-1] = article[i-1].replace("\n", "")
	elif lang == "en":
		for i, _ in enumerate(article):
			# A line with a hyperlink
			if article[i].strip() == ". opens in new tab":
				article[i] = ""
				if article[i-1].endswith("\n"):
					article[i-1] = article[i-1].replace("\n", "")
	# Re-split on the remaining newlines so joined fragments become one line.
	full_text = "".join(article)
	article = full_text.split("\n")
	return article
def filter(article, article_type, lang):
	"""Drop non-translatable lines (figures, tables, boilerplate).

	article: list of lines (already stitched).
	article_type: NEJM type abbreviation (e.g. "oa", "jw.na", "clde").
	lang: "zh" or "en".
	Returns the surviving lines. Correspondence ("c") and Images in
	Clinical Medicine ("icm") articles are dropped entirely.
	"""
	keep = [True] * len(article)
	# Remove correspondence and
	# image in clinical medicine
	if article_type in ["c", "icm"]:
		return []
	if lang == "zh":
		for i, text in enumerate(article):
			#############################
			# Remove text in the middle #
			#############################
			# Remove tables and figures
			if re.match("图[0-9]{1,2}\.", text):
				keep[i] = keep[i+1] = False
			elif re.match("表[0-9]{1,2}\.", text):
				keep[i] = False
			# Remove table captions
			elif text.startswith("*") or \
				text.startswith("†") or \
				text.startswith("‡") or \
				text.startswith("§") or \
				text.startswith("¶") or \
				text.startswith("‖") or \
				text.startswith("|"):
				keep[i] = False
			# Remove empty lines
			elif text.strip() == "":
				keep[i] = False
			######################
			# Remove text before #
			######################
			if article_type == "clde":
				if text.strip() == "案例摘要" or \
					text.strip() == "病例摘要":
					for j in range(i):
						keep[j] = False
			#####################
			# Remove text after #
			#####################
			if article_type == "jw.na": # Journal Watch
				if text.startswith("出版时") or \
					text.startswith("引文"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
			# Original Article
			# Review Article
			# Case Records
			# Perspective
			# Editorial
			# Clinical Problem solving
			# Clinical Implications of Basic Research
			# Special report
			# Special article
			# Clinical therapeutics
			# Health policy report
			# Clinical practice
			# Medicine and Society
			elif article_type in ["oa", "ra", "cpc", "p", "ms",\
				"e", "cps", "cibr", "sr", "sa", "ct", "hpr", "cp"] :
				if text.startswith("Disclosure") or \
					text.startswith("译者") or \
					text.startswith("作者信息"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
			# Corrections
			elif article_type == "x":
				if text.startswith("译者"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
			# Clinical Decisions
			elif article_type == "clde":
				if text.startswith("选项2"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
	elif lang == "en":
		for i, text in enumerate(article):
			#############################
			# Remove text in the middle #
			#############################
			# Remove Table and Figure
			if re.match("Table [0-9]{1,2}\.", text) or \
				re.match("Figure [0-9]{1,2}\.", text):
				keep[i] = keep[i+1] = False
			# Remove video and audio interviews:
			elif text.strip() == "Video" or \
				text.strip() == "Interactive Graphic":
				keep[i] = keep[i+1] = False
			# Audio interview:
			elif text.strip() == "Audio Interview":
				keep[i] = keep[i+1] = keep[i+2] = False
			# Remove QUICK TAKE:
			elif text.strip() == "QUICK TAKE":
				keep[i] = keep[i+1] = keep[i+2] = keep[i+3] = keep[i+4] = False
			# Remove VISUAL ABSTRACT:
			elif text.strip() == "VISUAL ABSTRACT":
				keep[i] = keep[i+1] = keep[i+2] = False
			# Remove intro and other text:
			elif text.strip() == "Letters" or \
				text.strip() == "Download" or \
				text.strip() == "Audio Full Text" or \
				text.strip() == "Key Clinical Points" or \
				text.strip() == "Poll" or \
				text.startswith("Comments open through") or \
				text.startswith("Citing Article") or \
				text.startswith("Option 1") or \
				text.startswith("Option 2") or \
				re.match("^[0-9]+ Reference", text) or \
				re.match("^[0-9]+ Citing Article", text) or \
				re.match("^[0-9]+ Comment", text):
				keep[i] = False
			# Remove sign-ups
			elif text.startswith("Sign up for"):
				keep[i] = False
			elif text.strip() == "":
				keep[i] = False
			######################
			# Remove text before #
			######################
			if article_type == "jw.na":
				for j in range(5): # Remove first 5 lines
					keep[j] = False
			elif article_type == "oa": # Original Article
				if text.strip() == "Abstract":
					for j in range(i):
						keep[j] = False
			elif article_type == "cpc": # Case Records
				if text.strip() == "Presentation of Case":
					for j in range(i):
						keep[j] = False
			#####################
			# Remove text after #
			#####################
			if article_type == "jw.na":
				if text.startswith("EDITOR DISCLOSURES AT TIME OF PUBLICATION") or \
					text.startswith("CITATION"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
			# Original Article
			# Review Article
			# Case Records
			# Perspective
			# Editorial
			# Clinical Problem Solving
			# Clinical Implications of Basic Research
			# Special report
			# Clinical decision
			# Special article
			# Clinical Therapeutics
			# Health policy report
			# Clinical Practice
			# Medicine and Society
			elif article_type in ["oa", "ra", "cpc", "p", "e", "ms",\
				"cps", "cibr", "sr", "clde", "sa", "ct", "hpr", "cp"]:
				if text.startswith("Disclosure"):
					for j in range(i, len(keep)):
						keep[j] = False
					break
	# Output to disk
	article_filt = []
	for a, k in zip(article, keep):
		if k == True:
			article_filt.append(a)
	return article_filt
def main():
	"""Stitch and filter every crawled article pair, writing *.filt.{zh,en}.

	Lines that appear verbatim in BOTH languages (shared boilerplate) are
	dropped from both outputs.
	"""
	meta = read_article_urls(url_dir)
	meta = meta[meta["year"] != 2020] # Remove year 2020
	for index, row in meta.iterrows():
		year = row["year"]
		month = row["month"]
		article_id = row["id"]
		# Article type = id with digits/percent signs removed.
		article_type = re.sub("[0-9%]+", "", article_id)
		zh_fn = f"{article_dir}/{year}/{month:02}/{article_id}.full.zh"
		en_fn = f"{article_dir}/{year}/{month:02}/{article_id}.nobox.en"
		print(f"path: {zh_fn}")
		zh_article = read_article(zh_fn)
		zh_article = stitch(zh_article, "zh")
		zh_article = filter(zh_article, article_type, "zh")
		print(f"path: {en_fn}")
		en_article = read_article(en_fn)
		en_article = stitch(en_article, "en")
		en_article = filter(en_article, article_type, "en")
		# Lines identical across languages are untranslated boilerplate.
		intersect = set(zh_article).intersection(set(en_article))
		zh_out_fn = zh_fn.replace(".full.", ".filt.")
		with open(zh_out_fn, "w") as f:
			for line in zh_article:
				if line not in intersect:
					f.write(line + "\n")
		en_out_fn = en_fn.replace(".nobox.",".filt.")
		with open(en_out_fn, "w") as f:
			for line in en_article:
				if line not in intersect:
					f.write(line + "\n")
if __name__ == "__main__":
	main()<file_sep>############
# Master run-book: executes the full NEJM crawl -> preprocess -> align ->
# translate -> visualize pipeline. Steps marked "Do not run"/"Don't run"
# require special hardware (GPU) or a docker container.
# Crawling #
############
# Crawl NEJM websites:
python3 crawler/crawl.py
# Get article type distribution:
python3 crawler/url_stat.py
# Filter unwanted texts:
python3 crawler/filter.py
# Get length difference before and after filtering:
python3 crawler/article_stat.py
#################
# Preprocessing #
#################
# Preprocess NEJM articles
# Step 1:
# Turn English sentences into lower case and normalize
# punctuations, also remove:
bash preprocess/normalize.sh
# Step 2:
# Split paragraphs into sentences:
bash preprocess/detect_sentences/eserix.sh
python3 preprocess/detect_sentences/punkt.py
python3 preprocess/detect_sentences/sent_stat.py
# Step 3:
# Tokenize sentences and change case:
bash preprocess/tokenize.sh
bash preprocess/lowercase.sh
bash preprocess/truecase.sh
# Step 4:
# Manually align:
bash preprocess/manual_align/copy.sh
bash preprocess/manual_align/align.sh
##################
# WMT18 baseline #
##################
# Build a WMT18 baseline model.
# Data: WMT18 news translation shared task
# Model: default transformer
# Train zh -> en on WMT18 BPE data:
# Do not run (only run on GPU nodes)
bash translation/wmt18/train.sh
##############
# Evaluation #
##############
#---- Abstracts -----#
# Create a manually aligned gold-standard based on
# WMT19 Biomedical translation shared task to
# evaluate bleualign with WMT18 baseline model:
bash evaluation/wmt19_biomed/modifications.sh
# This will do necessary preprocessing (segmentation, tokenization, BPE)
# and generate sentence-to-sentence translation. Additionally, it will
# also mark each sentence with doc#,# markers.
bash evaluation/wmt19_biomed/translate.sh
# Here I align with Bleualign, Gale-Church, and Moore's IBM 1 model.
bash evaluation/wmt19_biomed/align.sh
# Evaluate precision and recall for different algorithms:
bash evaluation/wmt19_biomed/evaluate.sh
#------ NEJM articles -------#
# Align with Moore's algorithm:
bash evaluation/nejm/align/moore/input.sh
bash evaluation/nejm/align/moore/align.sh
# Align with Hunalign:
bash evaluation/nejm/align/hunalign/input.sh
bash evaluation/nejm/align/hunalign/align.sh
# Align with Bleualign:
bash evaluation/nejm/align/bleualign/input.sh
bash evaluation/nejm/align/bleualign/translate.sh
bash evaluation/nejm/align/bleualign/align.sh
# Evaluate precision and recall for different algorithms:
bash evaluation/nejm/evaluate.sh
# Visually compare Precision-Recall across methods:
python3 evaluation/nejm/vis_pr_curve.py
#####################
# Machine Alignment #
#####################
# Use Moore's algorithm to align:
bash alignment/moore/input.sh
bash alignment/moore/align.sh
#########################
# Crowdsource Alignment #
#########################
# Prepare articles
python3 crowdsource/prep_articles.py
############
# Clean up #
############
# Use bifixer to remove duplicate sentences:
# Don't run (need docker container)
bash clean/concat.sh
bash clean/clean.sh
##############
# Split data #
##############
# Split data into training, dev, test:
bash split_data/split_train_test.py
###############
# Translation #
###############
# Subset the data:
python3 subset/subset.py
# Fine-tune on NEJM dataset:
bash translation/nejm/finetune.sh
bash translation/nejm/test_finetune.sh
# Train on NEJM from scratch:
bash translation/nejm/train_denovo.sh
bash translation/nejm/test_denovo.sh
# Plot bleu score:
python3 translation/nejm/plot_bleu.py
# Do the above things for a LSTM model:
# Fine-tune on NEJM dataset:
bash translation/nejm/finetune_rnn.sh
bash translation/nejm/test_finetune_rnn.sh
# Train on NEJM from scratch:
bash translation/nejm/train_denovo_rnn.sh
bash translation/nejm/test_denovo_rnn.sh
# Plot bleu score:
python3 translation/nejm/plot_bleu_rnn.py
#################
# Visualization #
#################
# Make table 1
python3 visulization/tables/tab1/tab1.py<file_sep># Concatenate sentences from alignment by Moore
in_dir=../processed_data/alignment/moore/align/
out_dir=../processed_data/clean/concat/
mkdir -p $out_dir
echo "$(ls $in_dir/*zh | wc -l | cut -d " " -f1) articles found in $in_dir"
# Start from a clean output file; each article is appended below.
[[ -f $out_dir/all.align.txt ]] && rm $out_dir/all.align.txt
for zh in $in_dir/*zh; do
	echo "Article: $zh"
	doc=$(basename $zh .align.zh)
	length=$(wc -l $zh | cut -d " " -f1)
	echo "$length lines in total"
	en=${zh/.zh/.en}
	[[ -f /tmp/doc.txt ]] && rm /tmp/doc.txt
	[[ -f /tmp/sent.txt ]] && rm /tmp/sent.txt
	echo "Creating doc ID and sentence number"
	# Build the first two columns (doc id, 1-based sentence number).
	for i in `seq 1 $length`; do
		echo $doc >> /tmp/doc.txt
		echo $i >> /tmp/sent.txt
	done
	echo "Concatenating Chinese and English articles"
	# Output columns: doc id, sentence number, zh sentence, en sentence.
	paste /tmp/doc.txt /tmp/sent.txt $zh $en >> $out_dir/all.align.txt
done<file_sep>import os
import re
import sys
import glob
from collections import defaultdict
import pandas as pd
# Show all columns when printing DataFrames interactively.
pd.set_option('display.max_columns', 999)
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
# Make the project-local utils package importable.
sys.path.append(".")
from utils.utils import read_article_urls, abbrev
# Input: crawled article URL lists and sentence-split articles from the
# two tokenizers under comparison (punkt vs. eserix).
urls_dir = "../processed_data/crawler/nejm/urls/"
base_dir = "../processed_data/preprocess/sentences/"
punkt_dir = f"{base_dir}/punkt/"
eserix_dir = f"{base_dir}/eserix/"
# Output: summary statistics and comparison plots.
out_dir = "../processed_data/preprocess/sent_stat/"
os.makedirs(out_dir, exist_ok=True)
def get_article_length(in_dir, article_urls, status):
	"""Collect per-article sentence counts for both languages.

	For each article in `article_urls`, reads
	``{in_dir}/{id}.{status}.{lang}`` for lang in {zh, en} and records
	the number of lines (sentences) on each side plus their difference.

	Args:
		in_dir: directory holding per-article sentence files.
		article_urls: DataFrame with columns "year", "month" and "id".
		status: file-name infix, e.g. "filt".

	Returns:
		DataFrame with one row per article: id, year, month, zh_len,
		en_len, zh_m_en (zh minus en), type_abbr (id with digits/%
		removed), status, and abs_diff (absent when any zh_m_en is
		missing).
	"""
	container = defaultdict(lambda: \
		{"time": None,
		"zh": {"text": None, "len": None},
		"en": {"text": None, "len": None},
		"zh_m_en": None})
	for index, row in article_urls.iterrows():
		for lang in ["zh", "en"]:
			year = row["year"]
			month = row["month"]
			article_id = row["id"]
			fn = f"{in_dir}/{article_id}.{status}.{lang}"
			print(f"path: {fn}")
			# Some articles exist in only one language. Catch only the
			# file-access failure here; the original bare `except:` also
			# hid unrelated programming errors under "Article not found".
			try:
				with open(fn, "r") as f: text = f.readlines()
			except OSError:
				print("Article not found.")
				continue
			length = len(text)
			container[article_id]["time"] = (int(year), int(month))
			container[article_id][lang]["text"] = text
			container[article_id][lang]["len"] = length
			if container[article_id]["zh"]["len"] is not None and \
				container[article_id]["en"]["len"] is not None:
				container[article_id]["zh_m_en"] = \
					container[article_id]["zh"]["len"] - \
					container[article_id]["en"]["len"]
	article_stat = []
	for i, (k, v) in enumerate(container.items()):
		article_stat.append(pd.DataFrame({"id": k, "year": \
			v["time"][0], "month": v["time"][1], \
			"zh_len": v["zh"]["len"], "en_len": v["en"]["len"], \
			"zh_m_en": v["zh_m_en"]}, index=[i]))
	article_stat = pd.concat(article_stat)
	# Article type abbreviation: strip digits and "%" from the id.
	article_stat["type_abbr"] = article_stat["id"].apply(lambda x: re.sub("[0-9%]+", "", x))
	article_stat["status"] = status
	try:
		article_stat["abs_diff"] = article_stat["zh_m_en"].apply(lambda x: abs(x))
	except TypeError:
		# zh_m_en is None when one side is missing; abs(None) raises.
		print("NaN found in zh_m_en.")
	return article_stat
def p1():
	"""Boxplot comparing |zh - en| sentence-count differences for the
	punkt vs. eserix tokenizers; reads the module-level `punkt` and
	`eserix` DataFrames and saves to {out_dir}/cmp_punkt_eserix.pdf."""
	# Merge the per-article abs_diff columns from both tokenizers, then
	# melt to long form so seaborn can group by tokenizer.
	p1_data = pd.merge(punkt[["id", "abs_diff"]], eserix[["id", "abs_diff"]], \
		on="id", suffixes=["_punkt", "_eserix"])
	p1_data = pd.melt(p1_data, id_vars=["id"], \
		value_name="abs_diff", var_name="tokenizer")
	fig, ax = plt.subplots(1,1)
	ax.clear()
	# Outliers hidden to keep the boxes readable.
	sns.boxplot(x="tokenizer", y="abs_diff", data=p1_data, showfliers=False)
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	ax.set_xlabel("Sentence Tokenizer")
	ax.set_ylabel("|No. zh sent. - No. en sent.|")
	ax.set_xticklabels(["punkt", "eserix"])
	fig.set_size_inches(3,3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/cmp_punkt_eserix.pdf")
# Plot:
def punkt_plot():
	"""Scatter zh vs. en sentence counts per article (punkt tokenizer)
	with a dashed y=x reference line; reads the module-level `punkt`
	DataFrame and saves to {out_dir}/punkt.pdf."""
	fig, ax = plt.subplots(1,1)
	ax.clear()
	plt.scatter(x="zh_len", y="en_len", data=punkt)
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	ax.set_xlabel("Chinese Sentence Length")
	ax.set_ylabel("English Sentence Length")
	# y=x diagonal: points on it mean both sides split into the same
	# number of sentences.
	xlim = ylim = ax.get_xlim()
	plt.plot(xlim, ylim, color="red", linestyle="dashed")
	fig.set_size_inches(3,3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/punkt.pdf")
def eserix_plot():
	"""Scatter zh vs. en sentence counts per article (eserix tokenizer)
	with a dashed y=x reference line; reads the module-level `eserix`
	DataFrame and saves to {out_dir}/eserix.pdf."""
	fig, ax = plt.subplots(1, 1)
	ax.clear()
	plt.scatter(x="zh_len", y="en_len", data=eserix)
	for side in ("right", "top"):
		ax.spines[side].set_visible(False)
	ax.set_xlabel("Chinese Sentence Length")
	ax.set_ylabel("English Sentence Length")
	# Dashed y=x diagonal over the full x-range of the scatter.
	bounds = ax.get_xlim()
	plt.plot(bounds, bounds, color="red", linestyle="dashed")
	fig.set_size_inches(3, 3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/eserix.pdf")
# Read article and urls:
article_urls = read_article_urls(urls_dir)
article_urls = article_urls[article_urls["year"] != 2020] # Remove year 2020
# Per-article sentence counts for the punkt tokenizer; the commented
# block below records the abs_diff distribution observed on a full run.
punkt = get_article_length(punkt_dir, article_urls, "filt")
punkt["abs_diff"].describe()
# count 1973.000000
# mean 2.638621
# std 3.738087
# min 0.000000
# 25% 1.000000
# 50% 1.000000
# 75% 3.000000
# max 38.000000
# Same for eserix; its differences are visibly smaller than punkt's.
eserix = get_article_length(eserix_dir, article_urls, "filt")
eserix["abs_diff"].describe()
# count 1973.000000
# mean 1.509377
# std 2.853513
# min 0.000000
# 25% 0.000000
# 50% 0.000000
# 75% 2.000000
# max 27.000000
# Generate the comparison boxplot and the two scatter plots.
p1()
punkt_plot()
eserix_plot()<file_sep># Adding parallel corpora for IBM 1 model construction.
# Build the input directory for the Moore aligner evaluation: seed it
# with 100k WMT18 sentence pairs (for IBM model 1 training) and with the
# manually-aligned NEJM documents, each line tagged "| <doc>,<line>".
sent_dir=../processed_data/preprocess/manual_align/input/
out_dir=../processed_data/evaluation/nejm/align/moore/input/
# mkdir -p is idempotent; the original guarded it with `[[ ! -f ... ]]`,
# which tests for a regular *file* and is always true for a directory.
mkdir -p $out_dir
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.zh > \
	$out_dir/wmt18_zh.snt
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.en > \
	$out_dir/wmt18_en.snt
for f in $sent_dir/doc*; do
	echo $f
	base=$(basename $f)
	# stem = file name without the language extension, e.g. "doc1".
	stem=${base%.*}
	# doc1.zh -> doc1_zh.snt, the naming the aligner expects.
	base=${base/./_}.snt
	# Append a "| doc,line" factor to each sentence; the modified Moore
	# aligner uses only the first factor for alignment.
	awk '{print $0" | "v1","NR}' \
		v1=$stem $f > \
		$out_dir/$base
done
done<file_sep>#!/usr/bin/env python
import os
# Input: sentence-split NEJM articles (one sentence per line).
in_dir = "/mnt/scratch/boxiang/projects/med_translation/"\
	"processed_data/preprocess/sentences/"
# Output: the same articles with numbered lines for crowdsource workers.
out_dir = "/mnt/scratch/boxiang/projects/med_translation/"\
	"processed_data/alignment/crowdsource/prep_articles/"
os.makedirs(out_dir, exist_ok=True)
# The 12 manually-selected articles used for crowdsourced alignment;
# file names are the Chinese article titles.
articles = ["鼻咽癌的吉西他滨联合顺铂诱导化疗",
	"饮水可对饮用含糖饮料产生多大程度的对抗作用",
	"帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌",
	"转移性去势抵抗性前列腺癌的恩杂鲁胺耐药",
	"婴儿B群链球菌疾病预防指南更新",
	"黑种人理发店可帮助顾客降血压",
	"内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较",
	"尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗",
	"膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较",
	"1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果",
	"HIV相关癌症和疾病",
	"2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南"]
def add_line_num(in_fn, out_fn):
	"""Copy `in_fn` to `out_fn`, prefixing each line with its 1-based
	line number in full-width brackets, e.g. "【3】<tab><sentence>".

	Args:
		in_fn: path of the input text file.
		out_fn: path of the output file (overwritten if present).
	"""
	with open(in_fn, "r") as fin, open(out_fn, "w+") as fout:
		num = 0
		for line in fin:
			num += 1
			# `line` already carries its trailing newline; strip it
			# before re-appending "\n". The original kept both, which
			# inserted a blank line after every sentence.
			stripped = line.rstrip("\n")
			fout.write(f"【{num}】\t{stripped}\n")
# Number the lines of both language versions of every selected article.
for article in articles:
	print(article, flush=True)
	en_in = f"{in_dir}/{article}.en"
	zh_in = f"{in_dir}/{article}.zh"
	en_out = f"{out_dir}/{article}.en"
	zh_out = f"{out_dir}/{article}.zh"
	add_line_num(en_in, en_out)
	add_line_num(zh_in, zh_out)
<file_sep>data=../processed_data/preprocess/truecase/
# Prepare Moore-aligner input for the full corpus: hard-link every
# non-empty truecased NEJM article and add 100k WMT18 sentence pairs
# for IBM model 1 training.
out_dir=../processed_data/alignment/moore/input/
mkdir -p $out_dir/
# Link NEJM articles:
for f in $data/*; do
	echo $f
	base=$(basename $f)
	# Check if file is not empty:
	if [[ -s $f ]]; then
		# e.g. foo.filt.zh -> foo_zh.snt, the naming the aligner expects.
		base=${base/.filt./_}.snt
		ln $f $out_dir/$base
	fi
done
# Adding parallel corpora for IBM 1 model construction.
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.zh > \
	$out_dir/wmt18_zh.snt
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.en > \
$out_dir/wmt18_en.snt<file_sep># Use Bifixer to remove duplicated sentences from
# alignment by Moore's algorithm.
# This is a run-by-hand recipe, not a standalone script: the ssh/docker
# steps below are interactive.
in_dir=../processed_data/clean/concat/
out_dir=../processed_data/clean/clean/
ssh asimovbld-1
docker run -it -d -v /mnt/scratch/boxiang/projects/med_translation/processed_data/clean/concat:/var/input/ paracrawl/bitextor
# NOTE(review): the container id below is hard-coded from one particular
# `docker run`; it must be replaced with the id printed by the command
# above on each new run.
docker exec -it 57cff38ed689 "/bin/bash"
bifixer=/opt/bitextor/bifixer/bifixer/bifixer.py
input=/var/input/all.align.txt
output=/var/input/all.bifixer.txt
srclang=zh
tgtlang=en
pip3 install unidecode
pip3 install xxhash
# Columns 3/4 of the concat TSV hold the zh/en sentences; bifixer
# appends a hash and a score used below for deduplication.
python3.6 $bifixer \
	--scol 3 \
	--tcol 4 \
	--aggressive_dedup \
	$input $output $srclang $tgtlang
# Exit docker container:
exit
mv $in_dir/all.bifixer.txt $out_dir/all.bifixer.txt
# Remove duplicated sentences
python3 clean/rm_dup.py \
	$out_dir/all.bifixer.txt \
	$out_dir/all.rm_dup.txt
# Remove the hash and the score columns:
mv $out_dir/all.rm_dup.txt temp
cut -f1-4 temp > $out_dir/all.rm_dup.txt
rm temp<file_sep>#!/usr/bin/env python
# Subset the NEJM corpus to 4k, 8k, 16k, 32k,
# 64k sentence pairs.
import os
from collections import defaultdict
# Input: the full parallel training split.
in_dir = "../processed_data/split_data/split_train_test/"
src_fn = f"{in_dir}/train.zh"
tgt_fn = f"{in_dir}/train.en"
out_dir = "../processed_data/subset/subset/"
os.makedirs(out_dir, exist_ok=True)
# N = total number of training sentence pairs; the last "subset" is the
# whole corpus.
with open(src_fn, "r") as f:
	N = len(f.readlines())
subset = [4000, 8000, 16000, 32000, 64000, N]
# Open file handles:
out_fh = defaultdict(dict)
for n in subset:
	for side in ["zh", "en"]:
		fn = f"{out_dir}/nejm.train.{n}.{side}"
		out_fh[n][side] = open(fn, "w")
# Stream both sides in lockstep; line i goes into every subset larger
# than i, so each subset is a prefix of the full corpus.
with open(src_fn, "r") as fsrc, open(tgt_fn, "r") as ftgt:
	for i in range(N):
		src = next(fsrc)
		tgt = next(ftgt)
		for n in subset:
			if i < n:
				if i % 4000 == 0:
					print(f"##### Line {i} #####.")
					# NOTE(review): `side` here is the stale loop
					# variable from the handle-opening loop above
					# (always "en"); the message is cosmetic only.
					print(f"Writing to {out_dir}/nejm.train.{n}.{side}.")
				out_fh[n]["zh"].write(src)
				out_fh[n]["en"].write(tgt)
# Close file handles:
for n in subset:
	for side in ["zh", "en"]:
		out_fh[n][side].close()
<file_sep>#!/usr/bin/env bash
# Author : <NAME>, Modified by <NAME>
# Created : Nov 05, 2019
# Fine-tune the WMT18 LSTM baseline on NEJM subsets of increasing size.
# For each subset: apply the baseline's BPE codes, run OpenNMT
# preprocessing in both directions, then resume training from the
# wmt18 step-80000 checkpoints (hard-linked into each model dir).
ONMT=/mnt/home/boxiang/projects/OpenNMT-py
FROM=../processed_data/translation/wmt18/train_rnn/
OUT=../processed_data/translation/nejm/finetune_rnn/
TRAIN_DATA=../processed_data/subset/subset/
VALID_DATA=../processed_data/split_data/split_train_test/
BPE_DIR=../processed_data/translation/wmt18/train_rnn/data/
python=/mnt/home/boxiang/software/anaconda2/envs/py3/bin/python
# 93303 is the full NEJM training set.
for n in 4000 8000 16000 32000 64000 93303; do
	echo "Subset of $n sentence pairs."
	TRAIN_SRC=$TRAIN_DATA/nejm.train.$n.zh
	TRAIN_TGT=$TRAIN_DATA/nejm.train.$n.en
	VALID_SRC=$VALID_DATA/nejm.dev.zh
	VALID_TGT=$VALID_DATA/nejm.dev.en
	TEST_SRC=$VALID_DATA/nejm.test.zh
	TEST_TGT=$VALID_DATA/nejm.test.en
	echo "Output dir = $OUT"
	mkdir -p $OUT/data/$n/{zh2en,en2zh}
	mkdir -p $OUT/models/$n/
	mkdir -p $OUT/test/$n/
	echo "Step 1a: Preprocess inputs"
	echo "BPE on source"
	# Reuse the baseline's BPE codes so the vocabulary matches the
	# checkpoints being fine-tuned.
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.zh < $TRAIN_SRC > $OUT/data/$n/train.$n.zh
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.zh < $VALID_SRC > $OUT/data/$n/valid.zh
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.zh < $TEST_SRC > $OUT/data/$n/test.zh
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.en < $TRAIN_TGT > $OUT/data/$n/train.$n.en
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.en < $VALID_TGT > $OUT/data/$n/valid.en
	$ONMT/tools/apply_bpe.py -c $BPE_DIR/bpe-codes.en < $TEST_TGT > $OUT/data/$n/test.en
	echo "Step 1b: Preprocess"
	# zh -> en
	$python $ONMT/preprocess.py \
		-src_seq_length 999 \
		-tgt_seq_length 999 \
		-train_src $OUT/data/$n/train.$n.zh \
		-train_tgt $OUT/data/$n/train.$n.en \
		-valid_src $OUT/data/$n/valid.zh \
		-valid_tgt $OUT/data/$n/valid.en \
		-save_data $OUT/data/$n/zh2en/processed \
		-overwrite
	# en -> zh
	$python $ONMT/preprocess.py \
		-src_seq_length 999 \
		-tgt_seq_length 999 \
		-train_src $OUT/data/$n/train.$n.en \
		-train_tgt $OUT/data/$n/train.$n.zh \
		-valid_src $OUT/data/$n/valid.en \
		-valid_tgt $OUT/data/$n/valid.zh \
		-save_data $OUT/data/$n/en2zh/processed \
		-overwrite
	echo "Step 2: Train"
	echo "Chinese to English"
	echo "Creating hard link for wmt18 baseline model."
	# OpenNMT resumes from the highest-numbered checkpoint found under
	# -save_model; the link seeds fine-tuning from the wmt18 baseline.
	ln $FROM/models/zh2en_step_80000.pt $OUT/models/$n/zh2en_step_80000.pt
	# restartsub.py submits the quoted command to a 2080Ti queue and
	# resubmits it if it dies; both directions run in the background.
	$python restartsub.py 2080Ti 1 zh2en_${n} \
		"$python $ONMT/train.py \
		-data $OUT/data/$n/zh2en/processed \
		-save_model $OUT/models/$n/zh2en \
		-layers 1 \
		-rnn_type LSTM \
		-rnn_size 512 \
		-word_vec_size 512 \
		-train_steps 100000 \
		-batch_size 4000 \
		-batch_type tokens \
		-normalization tokens \
		-optim adam \
		-learning_rate 0.001 \
		-label_smoothing 0.1 \
		-valid_steps 2000 \
		-save_checkpoint_steps 2000 \
		-world_size 1 \
		-gpu_ranks 0 \
		-seed 42" | \
		tee $OUT/models/$n/zh2en_restartsub.log &
	echo "English to Chinese"
	echo "Creating hard link for wmt18 baseline model."
	ln $FROM/models/en2zh_step_80000.pt $OUT/models/$n/en2zh_step_80000.pt
	$python restartsub.py 2080Ti 1 en2zh_${n} \
		"$python $ONMT/train.py \
		-data $OUT/data/$n/en2zh/processed \
		-save_model $OUT/models/$n/en2zh \
		-layers 1 \
		-rnn_type LSTM \
		-rnn_size 512 \
		-word_vec_size 512 \
		-train_steps 100000 \
		-batch_size 4000 \
		-batch_type tokens \
		-normalization tokens \
		-optim adam \
		-learning_rate 0.001 \
		-label_smoothing 0.1 \
		-valid_steps 2000 \
		-save_checkpoint_steps 2000 \
		-world_size 1 \
		-gpu_ranks 0 \
		-seed 42" | \
		tee $OUT/models/$n/en2zh_restartsub.log &
	#===== EXPERIMENT END ======
done<file_sep># Moore's algorithm (IBM 1):
# Run the (modified) Microsoft bilingual sentence aligner over the
# evaluation inputs at several confidence thresholds; collect the
# per-document alignments into align.zh / align.en per threshold.
moore=~/software/bilingual-sentence-aligner-modified/
data=/mnt/scratch/boxiang/projects/med_translation/processed_data/evaluation/nejm/align/moore/input/
out_dir=/mnt/scratch/boxiang/projects/med_translation/processed_data/evaluation/nejm/align/moore/align/
mkdir -p $out_dir
# Moore's algorithm is modified to allow factored sentences.
# One can factor sentences by the pipe "|" character. Only the
# first factor will be used in alignment
cd $moore # Must run in this directory
# `seq 0 0.05 0.95 `seq 0.955 0.005 0.995``
for threshold in `seq 0.985 0.005 0.995`; do
	echo $threshold
	perl $moore/align-sents-all-multi-file.pl $data $threshold
	mkdir -p $out_dir/$threshold/
	mv $data/doc*aligned $out_dir/$threshold/
	# Drop the aligner's intermediate model/bookkeeping files.
	rm $data/*{words,backtrace,nodes,train}
	rm $data/{model-one,sentence-file-pair-list}
	cat $out_dir/$threshold/doc*_zh.snt.aligned > $out_dir/$threshold/align.zh
	cat $out_dir/$threshold/doc*_en.snt.aligned > $out_dir/$threshold/align.en
done
<file_sep># This section creates the alignment file.
# Use the following commands to open Chinese and English docs side-by-side:
# Each command prefixes every sentence with its line number and pipes
# the result into vim; the zh/en pair for a document is inspected
# side-by-side while recording the gold alignment by hand.
sent_dir=../processed_data/preprocess/manual_align/input/
awk '{print NR,$0}' $sent_dir/doc1.zh | vim -
awk '{print NR,$0}' $sent_dir/doc1.en | vim -
awk '{print NR,$0}' $sent_dir/doc2.zh | vim -
awk '{print NR,$0}' $sent_dir/doc2.en | vim -
awk '{print NR,$0}' $sent_dir/doc3.zh | vim -
awk '{print NR,$0}' $sent_dir/doc3.en | vim -
awk '{print NR,$0}' $sent_dir/doc4.zh | vim -
awk '{print NR,$0}' $sent_dir/doc4.en | vim -
awk '{print NR,$0}' $sent_dir/doc5.zh | vim -
awk '{print NR,$0}' $sent_dir/doc5.en | vim -
awk '{print NR,$0}' $sent_dir/doc6.zh | vim -
awk '{print NR,$0}' $sent_dir/doc6.en | vim -
awk '{print NR,$0}' $sent_dir/doc7.zh | vim -
awk '{print NR,$0}' $sent_dir/doc7.en | vim -
awk '{print NR,$0}' $sent_dir/doc8.zh | vim -
awk '{print NR,$0}' $sent_dir/doc8.en | vim -
awk '{print NR,$0}' $sent_dir/doc9.zh | vim -
awk '{print NR,$0}' $sent_dir/doc9.en | vim -
awk '{print NR,$0}' $sent_dir/doc10.zh | vim -
awk '{print NR,$0}' $sent_dir/doc10.en | vim -
awk '{print NR,$0}' $sent_dir/doc11.zh | vim -
awk '{print NR,$0}' $sent_dir/doc11.en | vim -
awk '{print NR,$0}' $sent_dir/doc12.zh | vim -
awk '{print NR,$0}' $sent_dir/doc12.en | vim -
# NOTE: The results are placed in ../processed_data/preprocess/manual_align/alignment/align.txt
<file_sep>in_dir=../processed_data/preprocess/archive/sentences/
# Copy the 12 hand-picked tokenized articles into the manual-alignment
# input directory under stable docN.{zh,en} names (the same ordering as
# crowdsource/prep_articles.py).
out_dir=../processed_data/preprocess/manual_align/input/
mkdir -p $out_dir
cp $in_dir/鼻咽癌的吉西他滨联合顺铂诱导化疗.zh.tok $out_dir/doc1.zh
cp $in_dir/鼻咽癌的吉西他滨联合顺铂诱导化疗.en.tok $out_dir/doc1.en
cp $in_dir/饮水可对饮用含糖饮料产生多大程度的对抗作用.zh.tok $out_dir/doc2.zh
cp $in_dir/饮水可对饮用含糖饮料产生多大程度的对抗作用.en.tok $out_dir/doc2.en
cp $in_dir/帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌.zh.tok $out_dir/doc3.zh
cp $in_dir/帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌.en.tok $out_dir/doc3.en
cp $in_dir/转移性去势抵抗性前列腺癌的恩杂鲁胺耐药.zh.tok $out_dir/doc4.zh
cp $in_dir/转移性去势抵抗性前列腺癌的恩杂鲁胺耐药.en.tok $out_dir/doc4.en
cp $in_dir/婴儿B群链球菌疾病预防指南更新.zh.tok $out_dir/doc5.zh
cp $in_dir/婴儿B群链球菌疾病预防指南更新.en.tok $out_dir/doc5.en
cp $in_dir/黑种人理发店可帮助顾客降血压.zh.tok $out_dir/doc6.zh
cp $in_dir/黑种人理发店可帮助顾客降血压.en.tok $out_dir/doc6.en
cp $in_dir/内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较.zh.tok $out_dir/doc7.zh
cp $in_dir/内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较.en.tok $out_dir/doc7.en
cp $in_dir/尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗.zh.tok $out_dir/doc8.zh
cp $in_dir/尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗.en.tok $out_dir/doc8.en
cp $in_dir/膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较.zh.tok $out_dir/doc9.zh
cp $in_dir/膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较.en.tok $out_dir/doc9.en
cp $in_dir/1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果.zh.tok $out_dir/doc10.zh
cp $in_dir/1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果.en.tok $out_dir/doc10.en
cp $in_dir/HIV相关癌症和疾病.zh.tok $out_dir/doc11.zh
cp $in_dir/HIV相关癌症和疾病.en.tok $out_dir/doc11.en
cp $in_dir/2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南.zh.tok $out_dir/doc12.zh
cp $in_dir/2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南.en.tok $out_dir/doc12.en<file_sep># ParaMed: A Parallel Corpus for Chinese-English Translation in the Medical Domain
## Description
`NEJM` is a Chinese-English parallel corpus crawled from the New England Journal of Medicine website. English articles are distributed through <https://www.nejm.org/> and Chinese articles are distributed through <http://nejmqianyan.cn/>. The corpus contains all article pairs (around 2000 pairs) since 2011.
This project was motivated by the fact that the Biomedical translation shared task in WMT19 did not provide training data for Chinese/English. In fact, we did not find any publicly available parallel corpus between English and Chinese. We collected the `NEJM` corpus to facilitate machine translation between English and Chinese in the medical domain. We found that a remarkable boost in BLEU score can be achieved by pre-training on WMT18 and fine-tuning on the `NEJM` corpus.
## Data Download
You can download ~ 70% of data [here](https://github.com/boxiangliu/med_translation/blob/master/data/nejm-open-access.tar.gz?raw=true). Read on if you would like the entire dataset.
The New England Journal of Medicine is a pay-for-access journal. We are therefore prohibited by their copyright policy to freely distribute the entire dataset. However, Journal Watch and Original Research articles are open access six months after the initial publication. These articles make up about ~ 70% of the entire dataset and you can access them immediately using the link above.
If you are a NEJM subscriber through an institution or a personal account, as we believe most biomedical researchers are, you are entitled to access the full data. Please email us at <<EMAIL>> for access to the entire dataset.
## Installation & Prerequisite
The code in this repository was written in `python 3.7`.
First you need to clone the repository:
git clone https://github.com/boxiangliu/med_translation.git
Then install the following python packages if they are not installed yet.
- selenium
- numpy
- pandas
- matplotlib
- seaborn
- nltk
Also install these packages outside of python:
- [eserix](https://github.com/emjotde/eserix)
- [bifixer](https://github.com/bitextor/bifixer)
- [Microsoft bilingual sentence aligner](https://www.microsoft.com/en-us/download/details.aspx?id=52608&from=https%3A%2F%2Fresearch.microsoft.com%2Fen-us%2Fdownloads%2Faafd5dcf-4dcc-49b2-8a22-f7055113e656%2F)
- [OpenNMT](https://github.com/OpenNMT/OpenNMT-py)
## Reproducing the paper
WARNING: Those without access to NEJM will likely not be able to run all steps in this repo.
### 1. Crawl the NEJM website
In `crawl/crawl.py`, replace the value of `nejm_username` and `nejm_password` with your credentials. Then run:
python3 crawler/crawl.py
This step crawls thousands of articles from NEJM and will take a number of hours.
### [Optional] Plot summary statistics of crawled articles
To plot the distribution of articles by year and by type, run:
python3 crawler/url_stat.py
### Filter crawled articles
The crawled articles are peppered with bits and pieces of noisy text. To remove them, run:
python3 crawler/filter.py
### [Optional] Compare pre- and post-filtered articles
To see the effect of filtering on article lengths, run:
python3 crawler/article_stat.py
## 2. Preprocessing
We will normalize, break up paragraphs into sentences, tokenize and truecase.
### Normalize punctuations
We will standardize English and Chinese punctuations. Run:
bash preprocess/normalize.sh
### Split paragraphs into sentences
We will split English and Chinese paragraphs into sentences using eserix:
bash preprocess/detect_sentences/eserix.sh
### [Optional] Compare eserix with punkt
We can also split paragraphs with another popular python module called `punkt`, and compare the performance of the two algorithms.
python3 preprocess/detect_sentences/punkt.py
python3 preprocess/detect_sentences/sent_stat.py
### Tokenize and truecase
The final preprocessing steps will be tokenization and truecasing
bash preprocess/tokenize.sh
bash preprocess/truecase.sh
## 3. Sentence alignment
### Manual alignment
In order to compare automatic sentence alignment algorithms, we need to establish a set of ground truth alignment. Lucky for you, we have done the dirty work of aligning sentences. You can download the raw sentences (unaligned) [here](https://github.com/boxiangliu/med_translation/blob/master/data/manual_align_input.tar.gz?raw=true). Create a directory and untar into this directory. These will be used as input to the sentence alignment algorithm below.
mkdir ../processed_data/preprocess/manual_align/input/
cd ../processed_data/preprocess/manual_align/input/
tar xzvf manual_align_input.tar.gz
Next download the manual alignment result [here](https://raw.githubusercontent.com/boxiangliu/med_translation/master/data/align.txt). Place it into the following directory.
mkdir ../processed_data/preprocess/manual_align/alignment/
The alignment result will be used to assess the quality of alignment algorithms.
### Assessment of alignment algorithms
We assess the performance of [Gale-Church](https://www.aclweb.org/anthology/J93-1004.pdf) (length-based), [Microsoft aligner](https://www.microsoft.com/en-us/download/details.aspx?id=52608&from=https%3A%2F%2Fresearch.microsoft.com%2Fen-us%2Fdownloads%2Faafd5dcf-4dcc-49b2-8a22-f7055113e656%2F) (Lexicon-based), and [Bleualign](https://github.com/rsennrich/Bleualign) (translation-based). Install them on your system.
Next run the following commands to align sentences using all algorithms.
#### Align with Moore's algorithm:
bash evaluation/nejm/align/moore/input.sh
bash evaluation/nejm/align/moore/align.sh
#### Align with Bleualign and Gale-Church algorithm:
bash evaluation/nejm/align/bleualign/input.sh
bash evaluation/nejm/align/bleualign/translate.sh
bash evaluation/nejm/align/bleualign/align.sh
Evaluate the performance of all algorithms:
bash evaluation/nejm/evaluate.sh
python3 evaluation/nejm/vis_pr_curve.py
### Align the entire corpus
In the manuscript, we found that the Microsoft aligner gave the best performance. We use it to align the entire corpus.
bash alignment/moore/input.sh
bash alignment/moore/align.sh
## Clean the `NEJM` corpus
Some sentences such as paragraph headings will be duplicated many times. To remove them, run the following command:
bash clean/concat.sh
bash clean/clean.sh
## Split the data into train, dev and test:
Run the following to split data into train (~ 93000), development (~ 2000), and test (~ 2000):
bash split_data/split_train_test.py
## Translation
To determine whether `NEJM` helps improving machine translation in the biomedical domain, we first train a baseline model using WMT18 English/Chinese dataset and fine-tune the model on `NEJM`.
WMT18 preprocessed en/zh data can be downloaded [here](http://data.statmt.org/wmt18/translation-task/preprocessed/zh-en/).
Train the baseline model:
bash translation/wmt18/train.sh
Subset the dataset to see translation performance improvement at various corpus sizes.
python3 subset/subset.py
Fine-tune on NEJM dataset and test the fine-tune performance:
	bash translation/nejm/finetune.sh
	bash translation/nejm/test_finetune.sh
Train on `NEJM` from scratch (without using WMT18):
	bash translation/nejm/train_denovo.sh
	bash translation/nejm/test_denovo.sh
Plot bleu score:
python3 translation/nejm/plot_bleu.py
## Questions?
If you have any questions, please email us at <<EMAIL>>
<file_sep>/Users/boxiang/Documents/work/Baidu/projects/med_translation/processed_data/crawler/nejm/articles_norm/不停跳或停跳冠状动脉旁路移植术后的5年结局研究.zh
one English paragraph split into two Chinese paragraphs:
'we found an apparent differential effect of the two approaches to surgery according to whether patients had diabetes. although diabetes status was a prespecified subgroup variable at the inception of the trial, we did not specify a reason to expect a difference between the treatment groups. our intention was to examine the consistency of effects of the two types of cabg and was motivated by the results of trials of cabg (mostly on-pump) versus pci in which cabg was found to be superior to pci, particularly among patients with diabetes.27,28 our results suggesting that there may be an interaction between cabg type and diabetes status should be viewed in light of the fact that we did examine several other prespecified subgroups and that so far none of the other trials have reported a significant interaction with this variable. our results should stimulate a systematic analysis of data from all other trials on the relative long-term effect of the two techniques according to diabetes status. therefore, until further confirmation is available from other trials, caution should be exercised in interpreting this subgroup result.'
'我们发现这两种手术对于患者有无糖尿病存在明显的差异效应。尽管在试验起初,糖尿病状态是一个预设的亚组变量,但我们并未明确提出一个预期治疗组间差异的理由。我们的目的是检查两种类型cabg效应的一致性。本试验的动机来源于cabg(多数是停跳的)和pci的对比试验结果:在这些结果中人们发现cabg优于pci27,28,尤其是在糖尿病患者中。我们的结果显示在cabg类型和糖尿病状态之间也许存在某种交互作用。鉴于我们确实检查了一些其他预设亚组,而且迄今没有其他试验报告过这个变量存在显著交互作用这些事实,我们的结果应当得到重视。'
'我们的结果应该激发系统的数据分析,这些数据需要来自根据糖尿病状态关于两种技术相对远期效应的所有其他试验。所以,在从其他试验中得到进一步确定前,我们应该谨慎解释这个亚组结果。'
/Users/boxiang/Documents/work/Baidu/projects/med_translation/processed_data/crawler/nejm/articles_norm/比伐卢定和肝素单药治疗心肌梗死的比较研究.en
One English paragraph split into two English paragraphs.
'在接受pci的心肌梗死患者中,任何原因死亡、心肌梗死或大出血这一复合终点的发生率在比伐卢定组不低于肝素单药治疗组(由瑞典心肺基金会[swedish heart-lung foundation]等资助;在validate-swedeheart clinical-trialsregister.eu注册号为2012-005260-10;在clinicaltrials.gov注册号为nct02311231)。'
'among patients undergoing pci for myocardial infarction, the rate of the composite of death from any cause, myocardial infarction, or major bleeding was not lower among those who received bivalirudin than among those who received heparin monotherapy. (funded by the swedish heart-lung foundation and others; validate-swedeheart clinicaltrialsregister.eu number, 2012-005260-10'
'; clinicaltrials.gov number, nct02311231'
Two English paragraphs merged into one Chinese paragraph.
'对stemi患者,我们在血管造影后、随机化之前得到了有目击证人的患者口头同意。在接下来的24小时内,我们提供给患者关于本试验的书面信息,患者提供书面知情同意,确认继续参与试验。对nstemi患者,我们在血管造影前获得了患者的书面同意。 在血管造影后和pci前,我们通过在线瑞典冠脉造影和血管成形术登记系统(swedish coronary angiography and angioplasty registry,scaar;swedeheart登记系统的一个构成部分)以开放标签的形式将患者随机分配接受经静脉比伐卢定(medicines company)或经动脉普通肝素(leo pharma)治疗。我们采用电脑生成列表,根据心肌梗死类型(stemi或nstemi)和医院进行分层,采用排列区组按照1∶1的比例进行随机化。'
'in patients with stemi, witnessed oral consent was obtained after angiography and before randomization. within the following 24 hours, after written information about the trial had been provided, the patients confirmed further participation by providing written informed consent. in patients with nstemi, written consent was obtained before angiography.'
'after angiography but before pci, the patients were randomly assigned, through the online swedish coronary angiography and angioplasty registry (scaar; which is a component of the swedeheart registry), to receive in an open-label fashion either intravenous bivalirudin (the medicines company) or intraarterial unfractionated heparin (leo pharma). randomization was performed in a 1:1 ratio in permuted blocks, with the use of a computer-generated list, with stratification according to type of myocardial infarction (stemi or nstemi) and hospital
Chinese journal watch has citation<file_sep>#!/usr/bin/env python
# The output of Bifixer has two additional columns.
# The 5th column is a hash of the sentence pair
# The 6th column is a score (higher = better)
# For all sentences with the same hash,
# this script keeps one with the highest score.
import sys
# argv[1]: bifixer output (6 columns: doc, sent, zh, en, hash, score).
# argv[2]: deduplicated output path.
in_fn = sys.argv[1]
out_fn = sys.argv[2]
# in_fn = "/mnt/scratch/boxiang/projects/med_translation/"\
# 	"processed_data/clean/input/nejm.bifixer.all"
# out_fn = "/mnt/scratch/boxiang/projects/med_translation/"\
# 	"processed_data/clean/input/nejm.rm_dup.all"
print("Sifting through all sentences...")
# First pass: for every hash keep (score, line_index, zh, en) of the
# highest-scoring occurrence.
with open(in_fn, "r") as f:
	keep_dict = dict()
	# NOTE(review): remove_set is populated but never used afterwards —
	# dead code kept for reference.
	remove_set = set()
	for i, line in enumerate(f):
		split_line = line.strip().split("\t")
		zh = split_line[2]
		en = split_line[3]
		hash_ = split_line[4]
		score = float(split_line[5])
		# If hash not in keep_dict, add it.
		if hash_ not in keep_dict:
			keep_dict[hash_] = (score, i, zh, en)
		# If hash is in keep_dict, compare with the
		# existing score
		else:
			if keep_dict[hash_][0] < score:
				remove_set.add(keep_dict[hash_])
				keep_dict[hash_] = (score, i, zh, en)
			else:
				remove_set.add((score, i, zh, en))
print("Saving sentences with highest scores.")
# Second pass: re-read the input and emit only the winning line of each
# hash, preserving the original file order.
lines_to_keep = set()
for k, v in keep_dict.items():
	lines_to_keep.add(v[1])
with open(in_fn, "r") as fin, open(out_fn, "w") as fout:
	for i, line in enumerate(fin):
		if i in lines_to_keep:
			fout.write(line)
<file_sep>import os
import glob
import pandas as pd
import matplotlib
matplotlib.use('tkagg')
import matplotlib.pyplot as plt
import seaborn as sns
in_dir = "../processed_data/evaluation/nejm/evaluate/"
out_dir = "../processed_data/evaluation/nejm/vis_pr_curve/"
os.makedirs(out_dir, exist_ok=True)
def read_precision_recall(in_dir):
	"""Load every "<aligner>_<threshold>.pr" table under `in_dir`.

	The file name encodes the aligner (text before the first underscore)
	and the score threshold (text after the last underscore); both are
	added as columns to each table.

	Args:
		in_dir: directory containing tab-separated .pr files.

	Returns:
		A single DataFrame with all tables stacked plus "aligner" and
		"threshold" columns. pd.concat raises ValueError when no file
		could be read.
	"""
	pr_files = glob.glob(f'{in_dir}/*_*.pr')
	container = []
	for fn in pr_files:
		basename = os.path.basename(fn)
		aligner = basename.split("_")[0]
		threshold = basename.split("_")[-1].replace(".pr", "")
		threshold = float(threshold)
		print(f"Aligner: {aligner}")
		print(f"Threshold: {threshold}")
		# Some runs leave empty .pr files; skip them instead of aborting.
		# The original bare `except:` also hid unrelated errors here —
		# catch only pandas read failures.
		try:
			df = pd.read_table(fn)
		except (pd.errors.EmptyDataError, pd.errors.ParserError):
			print(f"Empty (Aligner: {aligner}; Threshold: {threshold})")
			continue
		df["aligner"] = aligner
		df["threshold"] = threshold
		container.append(df)
	pr = pd.concat(container)
	return pr
def get_f1(precision, recall):
	"""Harmonic mean of precision and recall; 0 when both are zero."""
	denom = precision + recall
	return 2 * precision * recall / denom if denom > 0 else 0
def p1(pr):
	"""Scatter precision vs. recall for 1-1 alignments, colored by
	aligner; saves to {out_dir}/pr_curve.pdf."""
	# Restrict to one-to-one sentence alignments.
	data = pr[pr["type"] == "1 - 1"]
	plt.figure()
	sns.scatterplot(x="precision", y="recall", hue="aligner", data=data)
	plt.tight_layout()
	plt.savefig(f"{out_dir}/pr_curve.pdf")
def p2(max_f1):
	"""Bar plot of each aligner's maximum F1; saves to
	{out_dir}/maximum_f1.pdf.

	Assumes `max_f1` rows arrive in the order Microsoft, Bleualign
	(uni), Bleualign (bi), Gale-Church — the hard-coded tick labels
	below are applied positionally.
	"""
	fig, ax = plt.subplots()
	sns.barplot(x="aligner", y="max_f1", data=max_f1)
	ax.set(xlabel="Aligner", ylabel="F1")
	ax.set_xticklabels(["Microsoft", "Bleualign\nUnidirection", "Bleualign\nBidirection", "Gale-Church"])
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	fig.set_size_inches(4,3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/maximum_f1.pdf")
def p3(f1_pr):
	"""Grouped bars of precision/recall/F1 per aligner (long-form input
	with columns "aligner", "variable", "value"); saves to
	{out_dir}/precision_recall_f1.pdf. Tick labels are positional, as
	in p2."""
	fig, ax = plt.subplots()
	sns.barplot(x="aligner", y="value", hue="variable", data=f1_pr)
	ax.set(xlabel=None, ylabel="Precision/Recall/F1")
	ax.set_xticklabels(["Microsoft", "Uni-dir.\nBleualign", "Bi-dir.\nBleualign", "Gale-Church"])
	ax.spines['right'].set_visible(False)
	ax.spines['top'].set_visible(False)
	# Place the legend above the axes in a single row.
	plt.legend(bbox_to_anchor=(0.0, 1.0), ncol=3, loc="lower left", borderaxespad=0.1)
	fig.set_size_inches(4,3)
	fig.tight_layout()
	fig.savefig(f"{out_dir}/precision_recall_f1.pdf")
def main():
	"""Load all PR tables, compute F1, and emit the three figures."""
	pr = read_precision_recall(in_dir)
	p1(pr)
	pr["f1"] = pr.apply(lambda x: get_f1(x["precision"], x["recall"]), axis=1)
	# Best F1 per aligner across thresholds.
	max_f1 = pr.groupby("aligner").agg(max_f1=pd.NamedAgg("f1", "max")).reset_index()
	max_f1 = max_f1.sort_values("max_f1", ascending=False)
	# NOTE(review): positional drop — assumes hunalign ends up at
	# index 3 after the groupby; fragile if the aligner set changes.
	max_f1 = max_f1.drop(index=3) # Drop hunalign
	p2(max_f1)
	# For each aligner, pull the precision/recall rows at its best F1.
	f1_pr = []
	for aligner, f1 in zip(max_f1["aligner"], max_f1["max_f1"]):
		temp = pr[(pr['aligner'] == aligner) & (pr["f1"] == f1)].drop("threshold", axis=1).drop_duplicates()
		f1_pr.append(temp)
	f1_pr = pd.concat(f1_pr)
	f1_pr = pd.melt(f1_pr, id_vars=["type", "aligner"])
	# Capitalize metric names for the legend ("precision" -> "Precision").
	f1_pr["variable"] = f1_pr["variable"].apply(lambda x: x.title())
	p3(f1_pr)
if __name__ == "__main__":
main()<file_sep>moses=~/software/mosesdecoder/scripts/tokenizer/
# Normalize punctuation in every filtered article with the Moses
# normalizer and drop blank lines (awk NF).
normalize=$moses/normalize-punctuation.perl
in_dir=../processed_data/crawler/nejm/articles/
out_dir=../processed_data/preprocess/normalize/
mkdir -p $out_dir
for f in $in_dir/*/*/*.filt.*; do
	echo $f
	out_fn=$(basename $f)
	cat $f | $normalize | awk NF > $out_dir/$out_fn
done<file_sep># This will calculate the precision and recall for the following algoriths:
# ba: Bleualign
# ba2: Bleualign with bidirectional translation
# gc: Gale-Church
# moore: Moore's IBM 1 model.
in_dir=../processed_data/evaluation/nejm/align/
out_dir=../processed_data/evaluation/nejm/evaluate/
mkdir -p $out_dir
# Map each algorithm name to its alignment output subdirectory.
declare -A container=( [moore]=moore/align/ \
	[hunalign]=hunalign/align/ [gale_church]=bleualign/align/gale_church/ \
	[bleualign1]=bleualign/align/one_sided/ [bleualign2]=bleualign/align/two_sided/ )
for algo in ${!container[@]}; do
	echo Algorithm: $algo
	dir=${container[$algo]}
	# One subdirectory per threshold tried by the aligner.
	for wd in $in_dir/$dir/*/; do
		threshold=$(basename $wd)
		echo $threshold
		# This will generate src <=> tgt alignment.
		python3 evaluation/nejm/gen_align_file.py \
			--src_fn $wd/align.zh \
			--tgt_fn $wd/align.en \
			--out_fn $out_dir/${algo}_${threshold}.align
		# Evaluate algorithm:
		# Precision/recall against the manual gold alignment.
		python3 evaluation/nejm/evaluate.py \
			--align_fn ../processed_data/preprocess/manual_align/alignment/align.txt \
			--pred_fn $out_dir/${algo}_${threshold}.align \
			--out_fn $out_dir/${algo}_${threshold}.pr
	done
done<file_sep>import pandas as pd
import os
import matplotlib
matplotlib.use('tkagg')
import matplotlib.pyplot as plt
import seaborn as sns
import glob
import re
from collections import defaultdict
denovo_dir = "../processed_data/translation/nejm/train_denovo/test/"
finetune_dir = "../processed_data/translation/nejm/finetune/test/"
out_dir = "../processed_data/translation/nejm/plot_bleu/"
os.makedirs(out_dir, exist_ok=True)
data = ["wmt18", "nejm.4000", "nejm.8000", "nejm.16000", \
"nejm.32000", "nejm.64000", "nejm.93303"]
direction = ["zh2en", "en2zh"]
# Collect BLEU scores for every (training regime, dataset size, direction)
# combination. Missing or malformed score files are skipped with a warning.
container = defaultdict(list)
for h, in_dir in [("de novo", denovo_dir), ("finetune", finetune_dir)]:
    for i, d in enumerate(data):
        for j in direction:
            fn = f"{in_dir}/{d}.{j}.tc.bleu"
            try:
                # First line looks like "BLEU = 23.45, ..."; extract the number.
                with open(fn, "r") as f:
                    line = f.readlines()[0].strip()
                bleu = re.search("BLEU = [0-9\.]+", line).group(0).split("=")[1].strip()
                bleu = float(bleu)
                container["bleu"].append(bleu)
                container["data_ord"].append(i)
                container["data"].append(d)
                container["direction"].append(j)
                container["train"].append(h)
            # Previously a bare `except:` swallowed every error (including
            # KeyboardInterrupt/SystemExit). Catch only the failures this
            # loop expects: missing file (OSError), empty file (IndexError),
            # unparsable line (AttributeError from re.search returning None),
            # bad number (ValueError).
            except (OSError, IndexError, AttributeError, ValueError):
                print(f"{fn} does not exist or could not be parsed.")
bleu = pd.DataFrame(container)
zeros = pd.DataFrame({"bleu":[0, 0], "data_ord": [0, 0], "data": ["nejm.0", "nejm.0"], "direction":["zh2en", "en2zh"], "train": ["de novo", "de novo"]})
bleu = pd.concat([zeros, bleu])
ord2num = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 5.46}
bleu["x"] = bleu["data_ord"].apply(lambda x: ord2num[x])
plt.ion()
fig, ax = plt.subplots(1, 1)
g = sns.lineplot(x="x", y="bleu", hue="direction", data=bleu, legend="brief", style="train", markers=["o","o"], dashes=[(2,1),""])
fig.set_size_inches(5,4)
fig.tight_layout()
g.legend_.texts[0].set_position((-40,0))
g.legend_.texts[0].set_text("Direction")
g.legend_.texts[3].set_position((-40,0))
g.legend_.texts[3].set_text("Training")
ax.set_xlabel("In-Domain Sentence Pairs")
ax.set_ylabel("1-ref BLEU")
ax.set_xticks([0, 1, 2, 3, 4, 5, 5.46])
ax.set_xticklabels(["0", "4000", "8000", "16000", "32000", "64000", ""])
ax.legend()
plt.savefig(f"{out_dir}/bleu.pdf")
plt.close()<file_sep>owd=../data/wmt19_biomed_modified/
# Create a copy of align_validation_zh_en.txt
# for downstream modifications.
# Fix: wmt19_biomed is a directory, so `cp` needs -r; without it the copy
# fails with "omitting directory" and every later command operating on
# $owd works on a missing tree.
cp -r ../data/{wmt19_biomed,wmt19_biomed_modified}
mv $owd/align_validation_zh_en{.txt,.orig.txt}
# Join the alignment lines with mapdocs_zh_en.txt (on its 2nd field) and
# re-sort in natural (version) order -- NOTE(review): assumes mapdocs maps
# file names to document ids; verify against the data files.
join -1 1 -2 2 -t $'\t' \
-o 1.1 2.1 1.2 1.3 \
$owd/align_validation_zh_en.orig.txt \
<(sort -k2 $owd/mapdocs_zh_en.txt) \
| sort -V -k2 > $owd/align_validation_zh_en.txt
rm $owd/align_validation_zh_en.orig.txt
# The following manual modifications were made
# to improve the accuracy of alignment.
# doc1 (omitted <=> 10, NO_ALIGNMENT): changed to OK
# doc2 (2 <=> 3,4, TARGET_GREATER_SOURCE): changed to 2,3 <=> 3,4
# doc2 (3 <=> omitted, NO_ALIGNMENT): same as above
# doc3 (3 <=> 5, TARGET_GREATER_SOURCE): changed to 3 <=> 5,6
# doc3 (4 <=> 6,7, SOURCE_GREATER_TARGET): changed to 4 <=> 7
# doc5 (9 <=> 11,12, NO_ALIGNMENT): changed to OK
# doc6: removed, sentences not found.
# doc7: removed, sentences not found.
# doc8 (2 <=> 4,5, TARGET_GREATER_SOURCE): changed to 2 <=> 4
# doc8 (3 <=> 6,7, SOURCE_GREATER_TARGET): changed to 3 <=> 5,6,7
# doc9 (1 <=> 1, SOURCE_GREATER_TARGET): removed 摘要 from source side.
# doc11 (1 <=> 1, SOURCE_GREATER_TARGET): removed 目的:
# doc12: removed, sentence not found.
# doc15: removed, sentence not found.
# doc16 (3 <=> 3,4,5, TARGET_GREATER_SOURCE): 3 <=> 3,4
# doc16 (4 <=> 6, SOURCE_GREATER_TARGET): 4 <=> 5,6
# doc18: removed, sentences not found.
# doc20 (4 <=> 5, SOURCE_GREATER_TARGET): changed to OK
# doc22, 23, 24, 27, 29: removed, sentences not found.
# doc30 (4 <=> 4, SOURCE_GREATER_TARGET): 4 <=> 4, 5
# doc34: removed, sentence not found.
# doc35 (1 <=> 1): removed 目的:
# doc35 (omitted <=> 3, NO_ALIGNMENT): changed to OK
# doc35 (4 <=> 5, SOURCE_GREATER_TARGET): 4 <=> 5,6
# doc35 (5 <=> 6,7,8, TARGET_GREATER_SOURCE): 5 <=> 7,8
# doc36 (1 <=> 1,2, TARGET_GREATER_SOURCE): changed to OK
# doc37: removed, sentences not found.
# doc38 (1 <=> 1): removed 目的:
# doc44,46: removed, sentences not found.
# doc47, 3 <=> 4,5,6,7, OVERLAP and doc47. 4 <=> 8, NO_ALIGNMENT: combined into 3,4 <=> 4,5,6,7,8.
# doc48 (3 <=> 3,4,5,6, SOURCE_GREATER_TARGET): 3 <=> 3,4,5,6,7
# doc48 (omitted <=> 7): see above
# doc49 (1 <=> 1): removed 目的:
# dpc53: removed, sentences not found.
# doc55: removed, sentences not found.
# doc57: removed, sentences not found.
# doc58 (8 <=> 11, SOURCE_GREATER_TARGET): changed to OK
# doc60 (8 <=> 9, SOURCE_GREATER_TARGET): changed to OK
# doc61: removed, sentences not found.
# doc67, 68, 69, 78, 80: removed, sentences not found.
# doc82 (3 <=> 5, SOURCE_GREATER_TARGET): changed to OK
# doc83, 85: removed, sentences not found
# doc90 (2 <=> 2, SOURCE_GREATER_TARGET): changed to TARGET_GREATER_SOURCE
# doc93 (1 <=> 2, SOURCE_GREATER_TARGET) and doc93 (omitted <=> 1, NO_ALIGNMENT): combined
# Extract sentences from medline_zh2en_{zh,en}.txt
# for evaluation.
awk 'BEGIN {FS = "\t"}; {print $3}' $owd/medline_zh2en_zh.txt \
> $owd/medline_zh2en_zh.textonly.txt
awk 'BEGIN {FS = "\t"}; {print $3}' $owd/medline_zh2en_en.txt \
> $owd/medline_zh2en_en.textonly.txt<file_sep>export data=../processed_data/preprocess/manual_align/input/
export model=../processed_data/translation/wmt18/train/models/
export ONMT=~/projects/OpenNMT-py
export moses_scripts=~/software/mosesdecoder/scripts/
export out_dir=../processed_data/evaluation/nejm/align/bleualign/translate/
mkdir -p $out_dir
# Iterate through all articles:
for f in $data/doc*; do
echo $f
base=$(basename $f)
ln $f $out_dir/$base
done
n=0
for zh in $out_dir/doc*.zh; do
n=$(($n+1))
en=${zh/.zh/.en}
echo Chinese Article: $zh
echo English Article: $en
echo Document No.: $n
## zh => en ##
# Apply BPE:
$ONMT/tools/apply_bpe.py \
-c ../processed_data/translation/wmt18/train/data/bpe-codes.zh \
< $zh \
> $zh.bpe
# Translate to English:
$ONMT/onmt/bin/translate.py \
-model $model/zh2en_step_270000.pt \
-src $zh.bpe \
-output $zh.bpe.2en \
-replace_unk -verbose \
-batch_size 1 \
-gpu 0
# Remove BPE characters:
cat $zh.bpe.2en \
| sed -E 's/(@@ )|(@@ ?$)//g' > $zh.2en
# en => zh
# Apply BPE:
$ONMT/tools/apply_bpe.py \
-c ../processed_data/translation/wmt18/train/data/bpe-codes.en \
< $en \
> $en.bpe
# Translate to Chinese:
$ONMT/onmt/bin/translate.py \
-model $model/en2zh_step_410000.pt \
-src $en.bpe \
-output $en.bpe.2zh \
-replace_unk -verbose \
-batch_size 1 \
-gpu 0
# Remove BPE characters:
cat $en.bpe.2zh \
| sed -E 's/(@@ )|(@@ ?$)//g' > $en.2zh
done
rm $out_dir/*bpe*<file_sep>in_dir=../processed_data/evaluation/nejm/align/bleualign/translate/
out_dir=../processed_data/evaluation/nejm/align/bleualign/input/
mkdir -p $out_dir
for zh in $in_dir/doc*.zh; do
en=${zh/.zh/.en}
en_base=$(basename $en)
zh_base=$(basename $zh)
stem=${zh_base%.*}
echo "Document Number: $stem"
# Add document-sentence markers:
awk '{print $0" | "v1","NR}' \
v1=$stem $zh > \
$out_dir/$zh_base.mark
awk '{print $0" | "v1","NR}' \
v1=$stem $zh.2en > \
$out_dir/$zh_base.2en.mark
awk '{print $0" | "v1","NR}' \
v1=$stem $en > \
$out_dir/$en_base.mark
awk '{print $0" | "v1","NR}' \
v1=$stem $en.2zh > \
$out_dir/$en_base.2zh.mark
done<file_sep># Align NEJM articles manually to create a gold-standard.
# Randomly select 3 articles:
# These are articles are in /mnt/scratch/boxiang/projects/\
# med_translation/processed_data/preprocess/sentences/
# This section creates the alignment file.
# Use the following commands to open Chinese and English docs side-by-side:
sent_dir="/mnt/scratch/boxiang/projects\
/med_translation/processed_data/preprocess/archive/sentences/"
awk '{print NR,$0}' $sent_dir/鼻咽癌的吉西他滨联合顺铂诱导化疗.zh | vim -
awk '{print NR,$0}' $sent_dir/鼻咽癌的吉西他滨联合顺铂诱导化疗.en | vim -
awk '{print NR,$0}' $sent_dir/饮水可对饮用含糖饮料产生多大程度的对抗作用.zh | vim -
awk '{print NR,$0}' $sent_dir/饮水可对饮用含糖饮料产生多大程度的对抗作用.en | vim -
awk '{print NR,$0}' $sent_dir/帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌.zh | vim -
awk '{print NR,$0}' $sent_dir/帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌.en | vim -
awk '{print NR,$0}' $sent_dir/转移性去势抵抗性前列腺癌的恩杂鲁胺耐药.zh | vim -
awk '{print NR,$0}' $sent_dir/转移性去势抵抗性前列腺癌的恩杂鲁胺耐药.en | vim -
awk '{print NR,$0}' $sent_dir/婴儿B群链球菌疾病预防指南更新.zh | vim -
awk '{print NR,$0}' $sent_dir/婴儿B群链球菌疾病预防指南更新.en | vim -
# awk '{print NR,$0}' $sent_dir/CAR-T为白血病及其他癌症患者带来的希望和挑战.zh | vim -
# awk '{print NR,$0}' $sent_dir/CAR-T为白血病及其他癌症患者带来的希望和挑战.en | vim -
awk '{print NR,$0}' $sent_dir/黑种人理发店可帮助顾客降血压.zh | vim -
awk '{print NR,$0}' $sent_dir/黑种人理发店可帮助顾客降血压.en | vim -
awk '{print NR,$0}' $sent_dir/内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较.zh | vim -
awk '{print NR,$0}' $sent_dir/内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较.en | vim -
awk '{print NR,$0}' $sent_dir/尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗.zh | vim -
awk '{print NR,$0}' $sent_dir/尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗.en | vim -
awk '{print NR,$0}' $sent_dir/膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较.zh | vim -
awk '{print NR,$0}' $sent_dir/膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较.en | vim -
awk '{print NR,$0}' $sent_dir/1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果.zh | vim -
awk '{print NR,$0}' $sent_dir/1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果.en | vim -
awk '{print NR,$0}' $sent_dir/HIV相关癌症和疾病.zh | vim -
awk '{print NR,$0}' $sent_dir/HIV相关癌症和疾病.en | vim -
awk '{print NR,$0}' $sent_dir/2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南.zh | vim -
awk '{print NR,$0}' $sent_dir/2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南.en | vim -
# NOTE: The results are placed in ../processed_data/preprocess/manual_align/align_validation_zh_en.txt
# This section creates two files
# nejm_valid.en and nejm_valid.zh
out_dir="/mnt/scratch/boxiang/projects/med_translation/\
processed_data/preprocess/manual_align/"
articles=(鼻咽癌的吉西他滨联合顺铂诱导化疗 \
饮水可对饮用含糖饮料产生多大程度的对抗作用 \
帕妥珠单抗和曲妥珠单抗辅助治疗早期HER2阳性乳腺癌 \
转移性去势抵抗性前列腺癌的恩杂鲁胺耐药 \
婴儿B群链球菌疾病预防指南更新 \
黑种人理发店可帮助顾客降血压 \
内科患者应用阿哌沙班和依诺肝素预防血栓形成的比较 \
尼拉帕尼用于铂类敏感型复发性卵巢癌的维持治疗 \
膀胱切除术的最佳手术方法:开放式手术与机器人辅助手术的比较 \
1型糖尿病患者胰岛素治疗中加用sotagliflozin的效果 \
HIV相关癌症和疾病 \
2017年慢性阻塞性肺疾病诊断和治疗的GOLD指南)
[[ -f $out_dir/nejm_valid.zh ]] && rm $out_dir/nejm_valid.zh
[[ -f $out_dir/nejm_valid.en ]] && rm $out_dir/nejm_valid.en
[[ -f $out_dir/nejm_valid.tok.zh ]] && rm $out_dir/nejm_valid.tok.zh
[[ -f $out_dir/nejm_valid.tok.en ]] && rm $out_dir/nejm_valid.tok.en
count=0
for article in ${articles[@]}; do
count=$(($count+1))
for lang in zh en; do
awk 'BEGIN {OFS="\t"}{print "doc"n,NR,$0}' n=$count \
$sent_dir/$article.$lang.tok >> $out_dir/nejm_valid.tok.$lang
awk 'BEGIN {OFS="\t"}{print "doc"n,NR,$0}' n=$count \
$sent_dir/$article.$lang >> $out_dir/nejm_valid.$lang
done
done
# The next step is to modify the manual align file
# 1. add doc# at the beginning of each line
# 2. add OK at the end of each line
# 3. add <=> between English and Chinese line numbers
# 4. append lines to $out_dir/align_validation_zh_en.txt
# Create validation set:
python3 utils/gen_para_corp.py \
--align_fn ../processed_data/preprocess/manual_align/align_validation_zh_en.txt \
--zh_fn ../processed_data/preprocess/manual_align/nejm_valid.tok.zh \
--en_fn ../processed_data/preprocess/manual_align/nejm_valid.tok.en \
--out_fn ../processed_data/preprocess/manual_align/nejm_valid.parallel.tok
python3 utils/gen_para_corp.py \
--align_fn ../processed_data/preprocess/manual_align/align_validation_zh_en.txt \
--zh_fn ../processed_data/preprocess/manual_align/nejm_valid.zh \
--en_fn ../processed_data/preprocess/manual_align/nejm_valid.en \
--out_fn ../processed_data/preprocess/manual_align/nejm_valid.parallel
<file_sep>#!/usr/bin/env bash
# Author : <NAME>, Modified by <NAME>
# Created : Nov 05, 2019
ONMT=/mnt/home/boxiang/projects/OpenNMT-py
OUT=../processed_data/translation/wmt18/train_rnn/
DATA=/mnt/data/boxiang/wmt18/
SRC=zh
TGT=en
TRAIN=$DATA/train/corpus
VALID=$DATA/dev/newsdev2017.tc
TEST=$DATA/dev/newstest2017.tc
BPE_OPS=16000
moses_scripts=~/software/mosesdecoder/scripts/
echo "Output dir = $OUT"
[ -d $OUT ] || mkdir -p $OUT
[ -d $OUT/data ] || mkdir -p $OUT/data/{zh2en,en2zh}
[ -d $OUT/models ] || mkdir -p $OUT/models/{zh2en,en2zh}
[ -d $OUT/test ] || mkdir -p $OUT/test/{zh2en,en2zh}
# echo "Step 1a: Preprocess inputs"
# echo "BPE on source"
# $ONMT/tools/learn_bpe.py -v -s $BPE_OPS < $TRAIN.$SRC > $OUT/data/bpe-codes.$SRC
# $ONMT/tools/learn_bpe.py -v -s $BPE_OPS < $TRAIN.$TGT > $OUT/data/bpe-codes.$TGT
# $ONMT/tools/apply_bpe.py -c $OUT/data/bpe-codes.$SRC < $TRAIN.$SRC > $OUT/data/train.src
# $ONMT/tools/apply_bpe.py -c $OUT/data/bpe-codes.$SRC < $VALID.$SRC > $OUT/data/valid.src
# $ONMT/tools/apply_bpe.py -c $OUT/data/bpe-codes.$TGT < $TRAIN.$TGT > $OUT/data/train.tgt
# $ONMT/tools/apply_bpe.py -c $OUT/data/bpe-codes.$TGT < $VALID.$TGT > $OUT/data/valid.tgt
#: <<EOF
# echo "Step 1b: Preprocess"
# zh -> en
# python $ONMT/preprocess.py \
# -src_seq_length 999 \
# -tgt_seq_length 999 \
# -train_src $OUT/data/train.src \
# -train_tgt $OUT/data/train.tgt \
# -valid_src $OUT/data/valid.src \
# -valid_tgt $OUT/data/valid.tgt \
# -save_data $OUT/data/zh2en/processed
# # en -> zh
# python $ONMT/preprocess.py \
# -src_seq_length 999 \
# -tgt_seq_length 999 \
# -train_src $OUT/data/train.tgt \
# -train_tgt $OUT/data/train.src \
# -valid_src $OUT/data/valid.tgt \
# -valid_tgt $OUT/data/valid.src \
# -save_data $OUT/data/en2zh/processed
echo "Step 2: Train"
# python restartsub.py TitanXx8 8 zh2en \
# "python $ONMT/train.py \
# -data $OUT/data/zh2en/processed \
# -save_model $OUT/models/zh2en \
# -layers 1 \
# -rnn_type LSTM \
# -rnn_size 512 \
# -word_vec_size 512 \
# -train_steps 500000 \
# -batch_size 4000 \
# -batch_type tokens \
# -normalization tokens \
# -optim adam \
# -learning_rate 0.001 \
# -label_smoothing 0.1 \
# -valid_steps 10000 \
# -save_checkpoint_steps 5000 \
# -world_size 1 \
# -gpu_ranks 0"
python restartsub.py TitanXx8 8 en2zh \
"python $ONMT/train.py \
-data $OUT/data/en2zh/processed \
-save_model $OUT/models/en2zh \
-layers 1 \
-rnn_type LSTM \
-rnn_size 512 \
-word_vec_size 512 \
-train_steps 500000 \
-batch_size 4000 \
-batch_type tokens \
-normalization tokens \
-optim adam \
-learning_rate 0.001 \
-label_smoothing 0.1 \
-valid_steps 10000 \
-save_checkpoint_steps 5000 \
-world_size 1 \
-gpu_ranks 0"<file_sep>data=../processed_data/preprocess/tokenize/
moses_scripts=~/software/mosesdecoder/scripts/
out_dir=../processed_data/preprocess/lowercase/
mkdir -p $out_dir
for article in $data/*; do
echo $article
base=$(basename $article)
cat $article | \
$moses_scripts/tokenizer/lowercase.perl \
> $out_dir/$base
done<file_sep>bleualign=~/projects/Bleualign/bleualign.py
moore=~/software/bilingual-sentence-aligner/
data=/mnt/scratch/boxiang/projects/med_translation/data/wmt19_biomed_modified/
# Bleualign
for doc in `awk '{print $1}' $data/medline_zh2en_zh.txt | uniq`; do
$bleualign --factored \
-s $data/separate_docs/${doc}_zh.snt \
-t $data/separate_docs/${doc}_en.snt \
--srctotarget $data/separate_docs/${doc}_zh.2en \
--printempty --verbosity 2 \
-o $data/$doc.ba
done
cat $data/separate_docs/doc*.ba-s > $data/align.tok.mark.ba-s
cat $data/separate_docs/doc*.ba-t > $data/align.tok.mark.ba-t
rm $data/separate_docs/doc*.ba-{s,t}
# Gale-Church:
for doc in `awk '{print $1}' $data/medline_zh2en_zh.txt | uniq`; do
$bleualign --factored \
-s $data/separate_docs/${doc}_zh.snt \
-t $data/separate_docs/${doc}_en.snt \
--srctotarget - \
--galechurch \
--printempty \
--verbosity 2 \
-o $data/$doc.gc
done
cat $data/separate_docs/doc*.gc-s > $data/align.tok.mark.gc-s
cat $data/separate_docs/doc*.gc-t > $data/align.tok.mark.gc-t
rm $data/separate_docs/doc*.gc-{s,t}
# Moore's algorithm (IBM 1):
# Adding parallel corpora for IBM 1 model construction.
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.zh > \
$data/separate_docs/wmt18_zh.snt
head -n 100000 /mnt/data/boxiang/wmt18/train/corpus.en > \
$data/separate_docs/wmt18_en.snt
# Moore's algorithm is modified to allow factored sentences.
# One can factor sentences by the pipe "|" character. Only the
# first factor will be used in alignment
cd $moore # Must run in this directory
perl $moore/align-sents-all-multi-file.pl \
$data/separate_docs/ 0.5
cat $data/separate_docs/doc*_zh.snt.aligned > $data/align.tok.mark.moore-s
cat $data/separate_docs/doc*_en.snt.aligned > $data/align.tok.mark.moore-t<file_sep>#!/usr/bin/env python
from collections import defaultdict
import pandas as pd
import os
wd = "../processed_data/preprocess/alignment/"
align_fn = f"{wd}/align_validation_zh_en.txt"
src_fn = f"{wd}/nejm_valid.zh"
tgt_fn = f"{wd}/nejm_valid.en"
out_dir = "../processed_data/publication/tables/tab1/tab1/"
os.makedirs(out_dir, exist_ok=True)
def read_sents(fn):
    """Load a sentence file into a DataFrame.

    Each line of `fn` is tab-separated: document id, sentence number,
    sentence text. Returns a DataFrame with columns doc / sent / text.
    """
    return pd.read_table(fn, sep="\t", names=["doc", "sent", "text"])
def parse_align_string(s):
    """Parse one side of an alignment spec into sentence numbers.

    "omitted" -> [], "3" -> [3], "3,4" -> [3, 4].
    """
    if s == "omitted":
        return []
    return [int(part) for part in s.split(",")]
def read_align(fn):
    """Read a manual alignment file into a DataFrame.

    Each line of `fn` is tab-separated; field 0 is the document id and
    field 1 is an alignment spec "SRC <=> TGT", where each side is
    "omitted", a single number, or a comma-separated list (parsed by
    parse_align_string).

    Returns a DataFrame with columns:
        doc  -- document id
        zh   -- list of source (Chinese) sentence numbers
        en   -- list of target (English) sentence numbers
        type -- "<n_src> - <n_tgt>" alignment cardinality label
    """
    proto_df = defaultdict(list)
    with open(fn, "r") as fin:
        for line in fin:
            split_line = line.split("\t")
            doc = split_line[0]
            alignment = split_line[1]
            # Split "SRC <=> TGT" into the two sides.
            src_no, tgt_no = alignment.split(" <=> ")
            src_list = parse_align_string(src_no)
            tgt_list = parse_align_string(tgt_no)
            # e.g. "1 - 2": one source sentence aligned to two targets.
            align_type = f"{len(src_list)} - {len(tgt_list)}"
            proto_df["doc"].append(doc)
            proto_df["zh"].append(src_list)
            proto_df["en"].append(tgt_list)
            proto_df["type"].append(align_type)
    df = pd.DataFrame(proto_df)
    return df
def merge_align_n_text(align, zh, en):
    """Attach concatenated sentence text to each alignment row.

    align  -- DataFrame from read_align (doc / zh / en / type columns)
    zh, en -- DataFrames from read_sents (doc / sent / text columns)

    For every alignment row, the listed sentence numbers are looked up
    in the matching sentence table and their texts concatenated with no
    separator. Mutates `align` in place (adds "zh_text"/"en_text") and
    also returns it.

    NOTE(review): .item() raises if a (doc, sent) pair is missing or
    duplicated in the sentence tables -- assumes both are complete and
    unique; verify against the input files.
    """
    proto_df = defaultdict(list)
    for i in range(align.shape[0]):
        doc = align["doc"].iloc[i]
        zh_sents = align["zh"].iloc[i]
        en_sents = align["en"].iloc[i]
        zh_text = en_text = ""
        for sent in zh_sents:
            zh_text += zh[(zh["doc"] == doc) & (zh["sent"] == sent)]\
                ["text"].item()
        for sent in en_sents:
            en_text += en[(en["doc"] == doc) & (en["sent"] == sent)]\
                ["text"].item()
        proto_df["zh_text"].append(zh_text)
        proto_df["en_text"].append(en_text)
    align["zh_text"] = proto_df["zh_text"]
    align["en_text"] = proto_df["en_text"]
    return align
def smrize_alignment(align):
    """Summarize alignment cardinalities: count and percentage per type."""
    summary = (
        align.groupby("type")
        .agg(Count=pd.NamedAgg("doc", "count"))
        .reset_index()
    )
    total = summary["Count"].sum()
    summary["Percent"] = [
        "{:.02f}%".format(count / total * 100) for count in summary["Count"]
    ]
    return summary
def get_latex_table():
pass
zh = read_sents(src_fn)
en = read_sents(tgt_fn)
align = read_align(align_fn)
align = merge_align_n_text(align, zh, en)
smry = smrize_alignment(align)
with open(f"{out_dir}/count.tex", "w+") as f:
smry.to_latex(f, index=False)
align_type = ["0 - 1", "1 - 0", "1 - 1", "1 - 2", "2 - 1", "2 - 2", "2 - 3"]
zh_example = ["", "主要的安全结局是出血。",
"加用吉西他滨联合顺铂诱导化疗在2期试验中显示出很有前景的疗效。",
"在铂类敏感的复发性卵巢癌患者中,无论存在或不存在gBRCA突变或HRD状态,与接受安慰剂的患者相比,接受尼拉帕尼治疗的患者的中位无进展生存期显著延长,存在中度骨髓毒性(本研究由Tesaro公司资助;在ClinicalTrials.gov注册号为NCT01847274)。",
"sotagliflozin是一种口服钠-葡萄糖协同转运蛋白-1和2的抑制剂。我们评价了在1型糖尿病患者中联用胰岛素和sotagliflozin的安全性和疗效。",
"与安慰剂组相比,腹泻在帕妥珠单抗组较为常见(由霍夫曼-罗氏/基因泰克[Hoffmann-La Roche/Genentech]资助。APHINITY在ClinicalTrials.gov注册号为NCT01358877)。",
"这些患者被包括在非-gBRCA HRD-阳性亚组中(同源重组率降低被发现可引起低效的DNA修复。更多细节在补充附录的方法部分提供)。"]
en_example = ["No other potential conflict of interest relevant to this article was reported.","",
"Additional gemcitabine and cisplatin induction chemotherapy has shown promising efficacy in phase 2 trials.",
"Among patients with platinum-sensitive, recurrent ovarian cancer, the median duration of progression-free survival was significantly longer among those receiving niraparib than among those receiving placebo, regardless of the presence or absence of gBRCA mutations or HRD status, with moderate bone marrow toxicity. (Funded by Tesaro; ClinicalTrials.gov number, NCT01847274.)",
"We evaluated the safety and efficacy of sotagliflozin, an oral inhibitor of sodium–glucose cotransporters 1 and 2, in combination with insulin treatment in patients with type 1 diabetes.",
"Diarrhea was more common with pertuzumab than with placebo. (Funded by <NAME>/Genentech; APHINITY ClinicalTrials.gov number, NCT01358877.)",
"Such patients were included in the non-gBRCA HRD-positive subgroup. (Decreased rates of homologous recombination have been found to cause inefficient DNA repair. Additional details are provided in the Methods section in the Supplementary Appendix.)"]
example = pd.DataFrame({"type":align_type, "zh": zh_example, "en": en_example})
with open(f"{out_dir}/examples.tex", "w+") as f:
example.to_latex(f, index=False)
<file_sep>#!/usr/bin/env python
from __future__ import print_function
import os
import re
import sys
import time
import glob
import subprocess
import logging
starttime = time.time()
onehourseconds = 60 * 60
partitiontime = {}
partitiontime["1080Ti_short"] = 1.8
partitiontime["1080Ti_slong"] = 6 * 24
partitiontime["1080Ti"] = 12
partitiontime["1080Ti_spec"] = 24 + 12
partitiontime["P100"] = 12
partitiontime["V100"] = 12
partitiontime["V100_DGX"] = 12
partitiontime["2080Ti"] = 12
partitiontime["TitanXx8_short"] = 1.8
partitiontime["TitanXx8_slong"] = 6 * 24
partitiontime["TitanXx8"] = 12
partitiontime["M40x8_short"] = 1.8
partitiontime["M40x8_slong"] = 6 * 24
partitiontime["M40x8"] = 12
excludepool = ["159"]
'''
1080Ti 2 smbmul2gpus "train.py -data data_256/2M.bpe256.zh2en.tt4 -save_model multiplegpus/2gpus_smbatch/model -gpuid 0 1 -layers 6 -heads 8 -transformer_ff 2048 -rnn_size 512 -word_vec_size 512 -encoder_type transformer -decoder_type transformer -position_encoding -max_generator_batches 32 -dropout 0.1 -batch_size 1024 -batch_type tokens -normalization tokens -accum_count 4 -optim adam -adam_beta2 0.997 -decay_method noam -warmup_steps 16000 -learning_rate 2 -max_grad_norm 0 -parametersm_init 0 -parametersm_init_glorot -label_smoothing 0.1 -train_steps 1000000" multiplegpus/2gpus_smbatch/log.2gpusmb.txt
'''
# Command-line interface: <partition> <num_gpus> <job_name> <command>
partition_name = sys.argv[1]
# Parse the GPU count as an integer. It was previously left as a string,
# so `numcpuspergpu * num_GPU` below performed string repetition
# (e.g. 8 * "1" == "11111111") instead of computing a CPU count.
num_GPU = int(sys.argv[2])
job_name = sys.argv[3]
cmd = sys.argv[4]
# excludepool = sys.argv[5]
# Wall-clock budget (hours) for the chosen partition.
runningtime = partitiontime[partition_name]
# CPUs allocated per GPU depend on the partition type.
if "Titan" in partition_name or "M40" in partition_name or "P100" in partition_name:
    numcpuspergpu = 1
elif partition_name == "1080Ti_dbg" or partition_name == "1080Ti_special":
    numcpuspergpu = 8
else:
    numcpuspergpu = 1
num_CPU = numcpuspergpu * num_GPU
# Parse arguments:
pattern = "-([A-Za-z\_]*)\ ([A-Za-z\_\ 0-9\/\.]*)"
parameters = dict(re.findall(pattern, cmd))
parameters = dict([(k.strip(), v.strip()) for k, v in parameters.items()])
save_model = parameters["save_model"]
save_dir = os.path.dirname(save_model)
print("Command: {}".format(cmd))
print("Save_dir: {}".format(save_dir))
logging.basicConfig(format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', # format='%(asctime)s %(levelname)-8s %(message)s',
filename=save_dir + "/sub_log." + job_name + '.txt',
level=logging.DEBUG,
datefmt='%Y-%m-%d %H:%M:%S')
logging.info(cmd)
def checkstate(job_name):
    """Query SLURM for the state of the job named `job_name`.

    Returns (job_state, job_id, node_list) parsed from `squeue` and
    `scontrol show job`. The first numeric token of the squeue output is
    taken as the job id -- assumes exactly one matching job is queued
    for user "boxiang"; raises IndexError otherwise.
    """
    jobinfo = subprocess.check_output(
        ["squeue", "-u", "boxiang", "-n", job_name])
    # First integer in the squeue output is the job id.
    jobid = re.findall(r'\b\d+\b', str(jobinfo))[0]
    commandslurm = ["scontrol", "show", "job", jobid]
    output = subprocess.check_output(commandslurm).decode("utf-8")
    # scontrol output is whitespace-separated KEY=VALUE pairs; collect
    # them all into a dict.
    alljobinfo = dict(re.findall("([A-Za-z/]*)=([^ \t\n]*)", output))
    return alljobinfo['JobState'], jobid, alljobinfo["NodeList"]
while True:
timediff = (time.time() - starttime)
logging.info("full running hours: %6.2f for job: %s " %
(timediff / 3600, job_name))
try:
latest_checkpoint = max(glob.iglob(
save_model + '_step_*.pt'), key=os.path.getctime)
trainfrom = " -train_from " + latest_checkpoint
new_cmd = cmd + trainfrom
print
logging.info("restart %s and load previouse model from %s" %
(job_name, latest_checkpoint))
slurm_com = "sbatch --gres=gpu:%s --ntasks=1 "\
"--cpus-per-task %s --partition=%s --job-name=%s "\
"--wrap \"%s \" --output=%s/slurm_log.%s.txt;" % (
str(num_GPU), str(num_CPU), partition_name, \
job_name, new_cmd, save_dir, job_name)
print("Slurm command: {}".format(slurm_com))
os.system(slurm_com)
except:
new_cmd = cmd
logging.info("restart job: %s from scratch" % (job_name))
slurm_com = "sbatch --gres=gpu:%s --ntasks=1 "\
"--cpus-per-task %s --partition=%s --job-name=%s "\
"--wrap \"%s \" --output=%s/slurm_log.%s.txt;" % (
str(num_GPU), str(num_CPU), partition_name, \
job_name, new_cmd, save_dir, job_name)
print("Slurm command: {}".format(slurm_com))
os.system(slurm_com)
time.sleep(10)
# check if everything works fine
try:
jobstate, jobid, nodelist = checkstate(job_name)
starttiming = True
except:
logging.warning(
"submission error for job: %s and restart in 20 seconds." % (job_name))
time.sleep(20)
continue
# start timing only after "RUNNING"
while jobstate != "RUNNING":
logging.info("job: %s is waiting for start in 30 seconds" % (job_name))
try:
jobstate, jobid, nodelist = checkstate(job_name)
logging.info("job name is: %s, and job status is %s and jobid is %s " % (
job_name, jobstate, jobid))
time.sleep(30)
starttiming = True
except:
starttiming = False
break
if starttiming:
jobstarttime = time.time()
while time.time() - jobstarttime < onehourseconds * runningtime + 120:
try:
jobstate, jobid, nodelist = checkstate(job_name)
# only print this log for the first 15 mins.
if time.time() - jobstarttime < 60 * 15:
logging.info("waiting for job: %s to finish in %d hours on machine %s with %s with %s GPU(s) and %s CPU(s)" % (
job_name, runningtime, nodelist, partition_name, num_GPU, num_CPU))
time.sleep(60)
except:
logging.warning(
"submission error for job: %s and restart in 5 seconds." % (job_name))
time.sleep(5)
break
else:
logging.warning(
"submission error for job: %s and restart in 10 seconds." % (job_name))
time.sleep(10)
continue
<file_sep>in_dir=../processed_data/preprocess/manual_align/input/
out_dir=../processed_data/evaluation/nejm/align/hunalign/input/
mkdir -p $out_dir
for a in $in_dir/*; do
echo $a
base=$(basename $a)
# ln $a $out_dir/$base
stem=${base%.*}
awk '{print $0" | "v1","NR}' \
v1=$stem $a > \
$out_dir/$base.mark
done
head -n 40000 /mnt/data/boxiang/wmt18/train/corpus.zh > \
$out_dir/wmt18.zh
head -n 40000 /mnt/data/boxiang/wmt18/train/corpus.en > \
$out_dir/wmt18.en
[[ -f $out_dir/batch ]] && rm $out_dir/batch
for src in $out_dir/*.zh; do
tgt=${src/.zh/.en}
out=${src/.zh/.ladder}
echo -e "$src\t$tgt\t$out" >> $out_dir/batch
done
<file_sep>export onmt=~/projects/OpenNMT-py/onmt/
export data=../data/wmt19_biomed_modified/
export model=../processed_data/translation/wmt18/train_bpe/models/
export ONMT=/mnt/home/boxiang/projects/OpenNMT-py
export moses_scripts=~/software/mosesdecoder/scripts/
# Chinese sentence segmentation by Jieba:
python3 -m jieba -d ' ' < $data/medline_zh2en_zh.textonly.txt \
> $data/medline_zh2en_zh.textonly.tok.txt
# Apply BPE:
$ONMT/tools/apply_bpe.py \
-c ../processed_data/translation/wmt18/train_bpe/data/bpe-codes.joint \
< $data/medline_zh2en_zh.textonly.tok.txt \
> $data/medline_zh2en_zh.textonly.tok.bpe.txt
# Translate to English:
$onmt/bin/translate.py \
-model $model/zh2en_step_140000.pt \
-src $data/medline_zh2en_zh.textonly.tok.bpe.txt \
-output $data/medline_zh2en_zh.textonly.tok.bpe.2en.txt \
-replace_unk -verbose \
-gpu 0
# Remove BPE characters:
cat $data/medline_zh2en_zh.textonly.tok.bpe.2en.txt \
| sed -E 's/(@@ )|(@@ ?$)//g' > $data/medline_zh2en_zh.textonly.tok.2en.txt
# Tokenize English:
cat $data/medline_zh2en_en.textonly.txt | \
$moses_scripts/tokenizer/lowercase.perl | \
$moses_scripts/tokenizer/normalize-punctuation.perl -l en | \
$moses_scripts/tokenizer/tokenizer.perl -a -l en \
>> $data/medline_zh2en_en.textonly.tok.txt
# Add document-sentence markers:
paste -d "|" \
$data/medline_zh2en_zh.textonly.tok.txt \
<(awk 'BEGIN{FS="\t";OFS=","}{print $1,$2}' $data/medline_zh2en_zh.txt) \
> $data/medline_zh2en_zh.textonly.tok.mark.txt
paste -d "|" \
$data/medline_zh2en_en.textonly.tok.txt \
<(awk 'BEGIN{FS="\t";OFS=","}{print $1,$2}' $data/medline_zh2en_en.txt) \
> $data/medline_zh2en_en.textonly.tok.mark.txt
paste -d "|" \
$data/medline_zh2en_zh.textonly.tok.2en.txt \
<(awk 'BEGIN{FS="\t";OFS=","}{print $1,$2}' $data/medline_zh2en_zh.txt) \
> $data/medline_zh2en_zh.textonly.tok.2en.mark.txt
# Split by document:
for doc in `awk '{print $1}' $data/medline_zh2en_zh.txt | uniq`; do
grep $doc, $data/medline_zh2en_zh.textonly.tok.mark.txt > \
$data/separate_docs/${doc}_zh.snt
grep $doc, $data/medline_zh2en_zh.textonly.tok.2en.mark.txt > \
$data/separate_docs/${doc}_zh.2en
done
for doc in `awk '{print $1}' $data/medline_zh2en_en.txt | uniq`; do
grep $doc, $data/medline_zh2en_en.textonly.tok.mark.txt > \
$data/separate_docs/${doc}_en.snt
done
|
2edfa18568b02e63757da73254c09e195b9f4efa
|
[
"Markdown",
"Python",
"Text",
"Shell"
] | 54 |
Shell
|
boxiangliu/ParaMed
|
08484488f4829bf144303a2e348c79e4e2ae5f71
|
65e67977c88c1ce2166d08d6d40a33f6961a3486
|
refs/heads/master
|
<file_sep>import sys
import matplotlib.pyplot as plt
import numpy as np
import skvideo
def printProgressBar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='*'):
    """Render a one-line terminal progress bar on stdout.

    iteration -- current iteration (int)
    total     -- total iterations (int)
    prefix    -- text printed before the bar
    suffix    -- unused by the current output format (kept for API compat)
    decimals  -- decimal places of the percentage (computed but not shown)
    length    -- bar width in characters
    fill      -- character for the completed portion

    Rewrites the same terminal line via a leading carriage return and
    emits a newline once iteration == total.
    """
    # Percentage is still computed (and still raises on total == 0, as
    # before) even though the active format string does not display it.
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    done = int(length * iteration // total)
    bar = fill * done + '-' * (length - done)
    sys.stdout.write('\r%s |%s| : %i/%i' % (prefix, bar, iteration, total))
    sys.stdout.flush()
    # Print New Line on Complete
    if iteration == total:
        print()
def saveVideo(videoMtrx, outfile):
    """Encode a video matrix indexed [frame, x, y] to `outfile`.

    Writes frames through skvideo's FFmpegWriter (libx264, yuv420p, 9 fps).
    """
    # Local imports fix two latent NameError/AttributeError bugs: the
    # module never imported `copy` at all, and its bare `import skvideo`
    # does not make the `skvideo.io` submodule available.
    import copy
    import skvideo.io
    out_vstream = skvideo.io.FFmpegWriter(outfile, outputdict={
        '-vcodec': 'libx264',
        '-pix_fmt': 'yuv420p',
        '-r': '9',
    })
    for frameno in range(videoMtrx.shape[0]):
        # Hand the writer a copy: per the original author's note, the
        # frame passed to writeFrame is cast to uint8 in place, which
        # would otherwise clobber the caller's matrix.
        out_vstream.writeFrame(copy.deepcopy(videoMtrx[frameno, :, :]))
    out_vstream.close()
def show_video(video, figsize=(10,10), autoscale=False):
    """Render a video array as an inline HTML5 animation (for notebooks).

    video     -- array indexed [frame, x, y]
    figsize   -- matplotlib figure size in inches
    autoscale -- if True, rescale the color range on every frame

    Returns an IPython HTML object wrapping the animation's mp4.
    matplotlib.animation and IPython are imported locally, so the module
    itself loads without them.
    """
    import matplotlib.animation as animation
    from IPython.display import HTML
    fig, ax1 = plt.subplots(1, figsize=figsize, constrained_layout=True)
    # Seed the image with the per-pixel maximum so the gray scale covers
    # the brightest frame.
    im = ax1.imshow(np.max(video, axis=0), cmap='gray')
    fig.set_constrained_layout_pads(w_pad=0, h_pad=0., hspace=0., wspace=0.)
    ax1.set_axis_off()
    idx = 0
    tot_frames = np.shape(video)[0]
    def updatefig(idx):
        # Called once per animation step: swap in frame `idx`.
        im.set_array(video[idx])
        if autoscale:
            im.autoscale()
        # ax1.set_title('Frame ' + str(idx))
        return fig,
    # steps = np.arange(tot_frames)
    ani = animation.FuncAnimation(fig, updatefig, frames=tot_frames, interval=250, blit=True)
    return HTML(ani.to_html5_video())
    # plt.show()
    # pass
# def show_video(video):
# import matplotlib.animation as animation
# from IPython.display import HTML
# fig, ax1 = plt.subplots(1, figsize=(10,10))
# im = ax1.imshow(np.max(video, axis=0), cmap='gray')
# ax1.set_axis_off()
# idx = 0
# tot_frames = np.shape(video)[0]
# def updatefig(idx):
# im.set_array(video[idx])
# # ax1.set_title('Frame ' + str(idx))
# return fig,
# # steps = np.arange(tot_frames)
# ani = animation.FuncAnimation(fig, updatefig, frames=tot_frames, interval=250, blit=True)
# return HTML(ani.to_html5_video())
# # plt.show()
# # pass
def eprint(*args, **kwargs):
    """print() that writes to stderr instead of stdout."""
    print(*args, file=sys.stderr, **kwargs)
def shiftimg(img, shift):
    """Shift a 2-D image by integer offsets, zero-filling vacated pixels.

    img   -- 2-D array
    shift -- (row_shift, col_shift); positive values move content
             down / right, negative values move it up / left.

    Returns a new array, except when both shifts are zero, in which case
    the input array itself is returned (no copy).

    Fix: the column-shift branch had no `else` for shift[1] == 0, so any
    call with a zero column shift (including shift == (0, 0) and pure
    row shifts) returned an all-zero image instead of the shifted input.
    """
    tmp1 = np.zeros_like(img)
    if shift[0] > 0:
        tmp1[shift[0]:, :] = img[0:-shift[0], :]
    elif shift[0] < 0:
        tmp1[0:shift[0], :] = img[-shift[0]:, :]
    else:
        tmp1 = img
    tmp2 = np.zeros_like(img)
    if shift[1] > 0:
        tmp2[:, shift[1]:] = tmp1[:, 0:-shift[1]]
    elif shift[1] < 0:
        tmp2[:, 0:shift[1]] = tmp1[:, -shift[1]:]
    else:
        tmp2 = tmp1
    return tmp2
import numpy as np
import matplotlib.pyplot as plt
import time
import sklearn.decomposition as skdec
import seaborn as sns
import matplotlib as mpl
import matplotlib.gridspec as gridspec
from skimage.io import imread
import skimage.morphology as mrph
from skimage.filters import threshold_otsu
from multiprocessing import Pool
from moro_utils import printProgressBar, eprint
def bin_video(video, binning):
    """Spatially bin each frame of a video by averaging binning x binning
    pixel tiles.

    video: array of shape (frames, X, Y)
    binning: integer tile size

    Returns an int16 array of shape roughly (frames, X//binning, Y//binning).
    NOTE(review): the stop index ``-binning - dim % binning`` trims the
    trailing edge of every frame; when a dimension is an exact multiple of
    `binning` this still drops the last tile -- confirm that is intended.
    """
    # average the `binning**2` phase-shifted decimated copies of each frame
    return np.array([np.mean([
        frame[
            ox:-binning-np.shape(video)[1]%binning:binning,
            oy:-binning-np.shape(video)[2]%binning:binning
        ]
        for ox in range(binning)
        for oy in range(binning)
    ], axis=0) for frame in video]).astype(np.int16)
def crosscorr(img1, img2):
    """crosscorr(img1, img2) -> img3
    FFT-based cross-correlation of the two images, fftshifted so the
    zero-shift term sits at the centre."""
    spectrum = np.fft.fft2(img1) * np.conj(np.fft.fft2(img2))
    correlation = np.fft.ifft2(spectrum)
    return np.abs(np.fft.fftshift(correlation))
# def crosscorr_max_zero(img1, img2):
# old implementation. foundamentally wrong
# """ max of the normalized crosscorrelation at zero shift """
# tmp1 = (img1-np.min(img1))/(np.max(img1)-np.min(img1))
# tmp2 = (img2-np.min(img2))/(np.max(img2)-np.min(img2))
# return np.max(tmp1*tmp2)
def zero_norm_crosscorr(img1, img2):
    """zero_norm_crosscorr(img1, img2) -> scalar
    Zero-normalized cross-correlation at zero shift: both inputs are
    mean-subtracted and std-scaled before averaging their product."""
    a = (img1 - np.mean(img1)) / np.std(img1)
    b = (img2 - np.mean(img2)) / np.std(img2)
    return np.mean(a * b)
def norm_crosscorr(img1, img2):
    """norm_crosscorr(img1, img2) -> scalar
    Cross-correlation at zero shift, normalized by the std of both inputs
    (means are NOT subtracted)."""
    scale = np.std(img1) * np.std(img2)
    return np.sum(img1 * img2) / (scale * np.size(img1))
def pupil(matrix, diameter=1):
    """Mask `matrix` with a centred circular aperture.

    `diameter` is the aperture diameter expressed as a fraction of the
    shorter side of the matrix (diameter=1 -> inscribed circle).
    """
    nrows, ncols = matrix.shape[:2]
    radius_px = diameter * min(nrows, ncols) / 2.
    rows = np.arange(nrows) - nrows / 2. + 0.5
    cols = np.arange(ncols) - ncols / 2. + 0.5
    dist2 = rows[:, np.newaxis] ** 2 + cols[np.newaxis, :] ** 2
    aperture = np.sqrt(dist2) < radius_px
    return matrix * aperture
def calcium_event(t, onset, ampli=1., ton=0.179, toff=0.550):
    """Noise-free calcium transient sampled at times `t`, starting at `onset`
    (in the same units as t). Rise/decay constants default to GCaMP6s:
    ton = 0.179 s, toff = 0.55 s."""
    dt = t - onset
    y = ampli * (1 - np.exp(-dt / ton)) * np.exp(-dt / toff)
    # zero everything up to (and just past) the onset sample; this removes
    # the spurious non-zero segment the formula produces before the event
    cutoff = int(onset / (t[1] - t[0])) + 2
    y[:cutoff] = 0
    # (normalization intentionally left disabled, as in the original)
    return y
def calcium_train(t, onsets):
    """Noise-free train of calcium events (built on calcium_event).

    t: array of sample times
    onsets: iterable of onset SAMPLE INDICES; each is converted to a time
        via the sample spacing t[1]-t[0]

    Returns (calcium_trace, ap_trace) where ap_trace is a binary spike train
    with ones at the onset indices.
    """
    numberofbins = len(t)
    ap_trace = np.zeros(numberofbins)
    calcium_trace = np.zeros(numberofbins)
    # (removed unused local `apno`; len(onsets) was never used)
    for ons in onsets:
        calcium_trace = calcium_trace + calcium_event(t, ons*(t[1]-t[0]))
        ap_trace[ons] = 1.
    return calcium_trace, ap_trace
def make_noise(trace, gaussian=0., randomwalk=0., poisson=0.):
    """Placeholder for adding noise to a trace.

    Currently a no-op: the gaussian / randomwalk / poisson amplitudes are
    ignored and `trace` is returned unchanged. A warning is printed to
    stderr so callers notice.
    """
    # (removed the needless function-local re-import of this package's own
    # eprint helper; write to stderr directly)
    import sys
    print('you called make_noise, but I do nothing...', file=sys.stderr)
    return trace
def draw_a_disk(base_img, center, r_spot, intensity=1):
    """draw_a_disk(base_img, center, r_spot, intensity=1) -> type(base_img)
    Add a filled disk of radius `r_spot` to the image, centred at `center`
    relative to the image middle. Assumes even image sides (mgrid spans
    exactly [-dim/2, dim/2))."""
    cx, cy = center
    half_x = int(np.shape(base_img)[0] / 2.)
    half_y = int(np.shape(base_img)[1] / 2.)
    grid_x, grid_y = np.mgrid[-half_x:half_x, -half_y:half_y]
    inside = ((grid_x - cx) ** 2 + (grid_y - cy) ** 2) < r_spot ** 2
    return base_img + inside * intensity
def draw_a_point(base_img, center, intensity=1):
    """ draw_a_point(base_img, center, intensity=1) -> type(base_img)
    put a point of the emitted neuron in the position "center", with given intensity """
    # NOTE(review): center is cast to uint16, so negative coordinates wrap
    # around (e.g. -1 -> 65535) and would index out of bounds -- confirm
    # callers only pass offsets with centre-offset <= image half-size.
    (x, y) = np.uint16(center)
    xm, ym = np.shape(base_img)
    # half-sizes: the point is placed relative to the image centre
    xm = np.uint16(xm/2.)
    ym = np.uint16(ym/2.)
    # base_img is modified IN PLACE and also returned
    base_img[xm-x, ym-y] = intensity
    return base_img
def place_rnd_pt_in_fov(rfov, timeout=500):
for i in range(timeout):
pt = np.random.rand(2)-0.5
if np.sqrt(np.sum(pt**2))<0.5:
break
return (pt[0]*rfov*2, pt[1]*rfov*2)
def place_rnd_pts_in_fov(ptno, rfov, mindist=0, timeout=500):
    """Place `ptno` random points inside a circular FOV of radius `rfov`,
    enforcing a minimum pairwise distance `mindist` where possible.

    Each point is rejection-sampled up to `timeout` times; if no candidate
    satisfies the distance constraint, a warning is printed on stderr and
    the last candidate is kept anyway.

    Returns an array of shape (ptno, 2).
    """
    # place the first point unconstrained
    pts = np.array([place_rnd_pt_in_fov(rfov)])
    # find a place for the other points
    for ptidx in range(1,ptno):
        for i in range(timeout):
            pt = place_rnd_pt_in_fov(rfov)
            if not np.min([np.sqrt(np.sum((np.subtract(pt, pp))**2)) for pp in pts])<mindist:
                break
        if (i+1)==timeout:
            eprint('timeout reached, placing a point with no min distance constrain...')
        # bug fix: the candidate used to be appended TWICE here, which
        # yielded 2*ptno-1 points instead of ptno
        pts = np.append(pts, [pt], axis=0)
    return pts
def contrast(rawdata, decimate='None', method='max'):
    """Speckle contrast (std/mean) of a 2D array.

    decimate: 'None' (legacy string sentinel) or None evaluates the contrast
        on the whole array. Otherwise the array is tiled into a
        decimate x decimate grid (dstep is the tile step) and the per-tile
        contrasts are combined.
    method: 'max' or 'mean' -- how per-tile contrasts are combined.
    """
    # accept both the legacy string sentinel 'None' and a real None
    if decimate=='None' or decimate is None:
        return np.std(rawdata)/np.mean(rawdata)
    [xsize, ysize] = np.shape(rawdata)
    dstep = np.int16(np.min([xsize, ysize])/decimate)
    tmpcontrt = []
    for xx in np.arange(0, xsize, dstep):
        for yy in np.arange(0, ysize, dstep):
            # trailing tiles may be smaller than dstep x dstep
            tmp = rawdata[xx:xx+dstep, yy:yy+dstep]
            tmpcontrt = np.append(tmpcontrt, np.std(tmp)/np.mean(tmp))
    if method=='max':
        return np.max(tmpcontrt)
    elif method=='mean':
        return np.mean(tmpcontrt)
# this class must be removed? and used a common structure from Python?
# maybe cen be useful an implementation with function_distance between the neurons
# (given by the autocorrelation)
# physical distance, and others..maybe is nice to keep it in a class form
# put some functions that can be useful in using the neurons as a list?
class neuron:
    # TODO: at some point let the neuron class generate its own events and
    # update its trace... maybe.
    # Bundles a simulated neuron's position, temporal trace and (lazily
    # built) spatial footprint / speckle pattern.
    def __init__(self, pt, trace):
        # self.x = int(pt[0])
        # self.y = int(pt[1])
        self.pt = pt            # (x, y) position relative to the FOV centre
        self.trace = trace      # temporal activity trace
        self.frameshape = None  # shape of the frame the spot is drawn on
        self.spotsize = None    # gaussian width of the spot
        self.spot = None        # diffraction-limited footprint (lazy)
        self.speckles = None    # specklized footprint (lazy)
        self.bruit = None       # phase-noise matrix used for specklization
    def make_spot(self, frameshape, spotsize):
        # Render the neuron as a gaussian spot on an empty frame; caches and
        # returns it.
        self.frameshape = frameshape
        self.spotsize = spotsize
        self.spot = draw_a_gaussian(np.zeros(frameshape), self.pt, spotsize)
        return self.spot
    def make_speckles(self, TM, specklesize):
        # NOTE(review): if make_spot was never called, frameshape/spotsize
        # are still None here and this fallback will fail -- confirm callers
        # always build the spot first.
        if self.spot is None:
            self.spot = self.make_spot(self.frameshape, self.spotsize)
        # return the speckle given by that neuron, using the TM provided
        # if the TM is just one matrix/entry, then all the points are just convolved
        # by the speckle pattern of the single point speckle
        self.TM_section = TM
        self.speckles = specklize_image(self.spot, specklesize, bruit=self.bruit)
        return self.speckles
class CaTrace:
    """Thin wrapper around a calcium trace (1D array-like) providing
    sequence semantics and concatenation via `+`."""
    def __init__(self, trace):
        self.trace = trace
    def __repr__(self):
        return 'traccia'
    def __len__(self):
        # bug fix: np.len does not exist; use the built-in len()
        return len(self.trace)
    def __getitem__(self, position):
        return self.trace[position]
    def __add__(self, other):
        # bug fix: __add__ was defined twice (the elementwise version was
        # silently shadowed by this one) and np.concatenate was called with
        # the wrong signature (second positional arg is the axis). Keep the
        # effective, last-defined semantics: concatenation of the traces.
        return np.concatenate((self.trace, other.trace))
def make_bruit(image_shape):
    """Uniform random phase-noise ("bruit") matrix with the given
    (rows, cols) shape; values in [0, 1)."""
    rows, cols = image_shape[0], image_shape[1]
    return np.random.rand(rows, cols)
def make_bruit_like(image):
    """Random phase-noise matrix with the same shape as `image`."""
    return make_bruit(np.shape(image))
def specklize_image(image, spekle_size, bruit=None, verbose=0):
    """Specklize `image`: propagate it through a random phase screen
    (`bruit`) and a circular pupil sized for the wanted speckle grain,
    then return the resulting intensity image.

    spekle_size: target speckle grain (samples per speckle); sets the pupil.
    bruit: optional phase-noise matrix, same shape as image. If omitted or
        shape-mismatched, a fresh one is generated.
    verbose: print a note when a new noise matrix is created.
    """
    # the noise matrix is entangled with the pupil size (samples per
    # speckle), so a user-supplied bruit must already be compatible with the
    # pupil applied below
    # if (image.shape != bruit.shape) or (image.shape[0] != image.shape[1]):
    #     print('image and noise must have the same size')
    #     return 0
    # radius of the lens pupil function in pixels
    # smaller the pupil, bigger the speckle grain
    # n = bruit.shape[0]
    # r0 = float(n)/k
    # k = spekle_size
    # make a new bruit if needed
    if np.shape(bruit) == ():
        if verbose:
            print('creating a new pupil noise')
        # bruit = np.random.rand(np.shape(image)[0], np.shape(image)[1])
        bruit = make_bruit_like(image)
    elif np.shape(bruit) != np.shape(image):
        eprint('shapes of image and bruit does not match: building a new bruit matrix')
        # bruit = np.random.rand(np.shape(image)[0], np.shape(image)[1])
        bruit = make_bruit_like(image)
    # scale the pupil of the added bruit depending to the wanted speckle size
    rpupil = np.int16(np.min(np.shape(image))/spekle_size)
    bruit = pupil(bruit, rpupil)
    # NOTE(review): pupil() treats its second argument as a diameter RATIO of
    # the short side, while rpupil here is in pixels -- confirm the units.
    scatteredfield = np.fft.fft2(np.sqrt(image))
    # scatteredfield = np.fft.fft2(image)
    # scatteredfield = np.fft.fftshift(np.fft.fft2(np.sqrt(image)))
    # calculate the field trasmitted by the lens pupil
    # randomfield = np.multiply( (bruit!=0)*np.exp(1j*2*np.pi*bruit), scatteredfield)
    # propagate through the scattering (random phase) screen
    randomfield = scatteredfield*np.exp(1j*2*np.pi*bruit)
    # propagate through the pupil
    randomfield = pupil(randomfield, rpupil)
    # pupilfield = pupil(np.multiply(scatteredfield, randomfield),r0)
    # back-propagate into the image plane and take the intensity
    imagefield = np.fft.ifft2(randomfield)
    imageintensity = np.abs(imagefield)**2
    # # directly from goodman:
    # but here the illumination is structured
    # scatteredfield = np.multiply(
    #     np.sqrt(image),
    #     np.exp(1j*2*np.pi*bruit)
    # )
    # pupilfield = pupil(np.fft.fft2(scatteredfield), rpupil)
    # imagefield = np.fft.ifft2(pupilfield)
    # imageintensity = np.abs(imagefield)**2
    return imageintensity
def pearson_crosscorr(t1, t2):
    """Squared Pearson-style correlation between two traces, computed after
    min-max rescaling each trace to [0, 1]."""
    u = np.squeeze((t1 - np.min(t1)) / (np.max(t1) - np.min(t1)))
    v = np.squeeze((t2 - np.min(t2)) / (np.max(t2) - np.min(t2)))
    numerator = np.correlate(u, v)[0] ** 2
    denominator = np.correlate(u, u)[0] * np.correlate(v, v)[0]
    return numerator / denominator
def trace_correlation(t1, t2, method='zncc'):
    """Correlation between two traces.
    method: 'zncc' -> zero-normalized cross-correlation,
            'pcc'  -> (squared) Pearson cross-correlation."""
    if method == 'zncc':
        return zero_norm_crosscorr(t1, t2)
    if method == 'pcc':
        return pearson_crosscorr(t1, t2)
def find_trace_couplings(groundtr, extractr, timeout=100):#, neuronno):
    """Match each ground-truth trace to the best-correlated extracted trace.

    Returns a list of [ground_idx, extracted_idx, correlation]. A repair
    loop (up to `timeout` passes) resolves cases where several ground-truth
    traces claim the same extracted trace: the best keeps it, the losers are
    reassigned to the still-unmatched extracted traces.

    NOTE(review): the repair step writes couplings[idx] using the
    ground-truth index, which assumes list position == ground index (true
    because couplings is built in enumeration order).
    """
    # first pass: greedy best match per ground-truth trace
    couplings = []
    for i, grt in enumerate(groundtr):
        correlations = []
        for j, tr in enumerate(extractr):
            correlations = np.append(correlations, trace_correlation(tr, grt))
        couplings.append([i, np.argmax(correlations), np.max(correlations)])
    # take a look if there is something with multiple match, and in case try to find a solution
    for ttt in range(timeout):
        matched_extraces = np.asarray([c[1] for c in couplings])
        ground_idx = np.asarray([c[0] for c in couplings])
        uniques = np.unique(matched_extraces, return_counts=True)
        # check if there is something to rematch
        multiple_match = uniques[0][uniques[1]>1]
        if not np.any(multiple_match):
            break
        # loop over the ground-truth traces competing for this same match
        for mm in multiple_match:
            to_be_rematched = ground_idx[[idx == mm for idx in matched_extraces]]
            correlations = []
            for idx in to_be_rematched:
                correlations = np.append(correlations,\
                    trace_correlation(groundtr[idx], extractr[mm]))
            # the strongest competitor keeps the match
            bestmatch_idx = np.argmax(correlations)
            to_be_rematched = np.delete(to_be_rematched, bestmatch_idx)
            # match with the missing ones
            missing = ground_idx[[mm not in matched_extraces for mm in ground_idx]]
            for idx in to_be_rematched:
                correlations = []
                for mm in missing:
                    correlations = np.append(correlations,\
                        trace_correlation(groundtr[idx], extractr[mm]))
                bestmatch_idx = missing[np.argmax(correlations)]
                couplings[idx] = [idx, bestmatch_idx, np.max(correlations)]
    return couplings
def print_dic(dic):
    """Pretty-print a dict, one 'key<TAB>value' line per entry; numeric
    values are shown with 3 decimals, everything else via str()."""
    for kk in dic.keys():
        # bug fix: the original tested `type('l') is str`, which is always
        # True, so the %.3f branch was dead code
        if isinstance(dic[kk], (int, float)):
            print('%s\t%.3f'%(kk, dic[kk]))
        else:
            print('%s\t%s'%(kk, str(dic[kk])))
def gaussian(height, center_x, center_y, width_x, width_y):
    """Returns a 2D gaussian function g(x, y) with the given parameters."""
    wx = float(width_x)
    wy = float(width_y)
    def profile(x, y):
        dx = (center_x - x) / wx
        dy = (center_y - y) / wy
        return height * np.exp(-(dx ** 2 + dy ** 2) / 2)
    return profile
def gaussian_profile(shape, center, width):
    """Unit-height isotropic gaussian of the given width, centred at
    `center` relative to the middle of an array of the given shape."""
    cx = center[0] + shape[0] / 2
    cy = center[1] + shape[1] / 2
    return gaussian(1, cx, cy, width, width)(*np.indices(shape))
def draw_a_gaussian(img, center, width, intensity=1):
    """Add a gaussian spot of the given width and peak intensity to `img`,
    centred at `center` relative to the image middle."""
    shape = np.shape(img)
    cx = center[0] + shape[0] / 2
    cy = center[1] + shape[1] / 2
    spot = gaussian(intensity, cx, cy, width, width)(*np.indices(shape))
    return np.add(img, spot)
def gaussian_donut(shape, inner_width, outer_width):
    """Centred gaussian band-pass profile: outer gaussian minus inner one.
    With inner_width == 0 only the outer gaussian is returned; with
    outer_width == 0 the NEGATIVE inner gaussian is returned."""
    grid = np.indices(shape)
    cx, cy = shape[0] / 2, shape[1] / 2
    if outer_width == 0:
        return -gaussian(1, cx, cy, inner_width, inner_width)(*grid)
    if inner_width == 0:
        return gaussian(1, cx, cy, outer_width, outer_width)(*grid)
    outer = gaussian(1, cx, cy, outer_width, outer_width)(*grid)
    inner = gaussian(1, cx, cy, inner_width, inner_width)(*grid)
    return outer - inner
def gauss_don_filt(matrix, hp = 0, lp = np.inf):
    """Band-pass filter a frame (2D) or a video (3D) in Fourier space with a
    gaussian-donut mask.

    hp is the highpass filter cutoff, lp is the lowpass filter cutoff (both
    in the gaussian-width units of gaussian_donut; lp=inf disables the
    lowpass side). The mask is built once per call and applied to every
    frame; the result is returned as int16.
    """
    # video case: build the mask from the frame shape, filter frame by frame
    if len(np.shape(matrix))>2:
        if np.isinf(lp):
            lp = 2*np.max(np.shape(matrix)[1:3])
        fftfiltermask = gaussian_donut(np.shape(matrix)[1:3], hp, lp)
        filtered = np.array([
            np.abs(np.fft.ifft2(np.multiply(np.fft.fft2(frame), np.fft.fftshift(fftfiltermask))))
            for frame in matrix
        ])
        return filtered.astype(np.int16)
    # single-frame case
    elif len(np.shape(matrix))==2:
        if np.isinf(lp):
            lp = 2*np.max(np.shape(matrix))
        fftfiltermask = gaussian_donut(matrix.shape, hp, lp)
        filt_fourier = np.multiply(np.fft.fft2(matrix), np.fft.fftshift(fftfiltermask))
        return np.abs(np.fft.ifft2(filt_fourier)).astype(np.int16)
def gauss_don_filt_GPU(video, hp = 0, lp = np.inf):
    """GPU (pycuda / scikit-cuda) version of gauss_don_filt: band-pass filter
    a video (3D) or a single frame (2D) in Fourier space with a
    gaussian-donut mask.

    hp is the highpass filter cutoff, lp is the lowpass filter cutoff.
    The real-to-complex FFT runs on the GPU; the masking step currently
    round-trips through host RAM (see the workaround comments below).
    Returns the filtered data as int16.
    """
    import pycuda.autoinit
    import pycuda.gpuarray as gpuarray
    import skcuda.fft as cu_fft
    if len(np.shape(video))>2:
        if np.isinf(lp):
            lp = 2*np.max(np.shape(video)[1:3])
        mask = gaussian_donut(np.shape(video)[1:3], hp, lp)
        n1, n2 = mask.shape
        mask = mask.astype('complex64')
        # prepare the forward (R2C) and backward (C2R) FFT plans once
        plan_forward = cu_fft.Plan((n1, n2), np.float32, np.complex64)
        plan_backward = cu_fft.Plan((n1, n2), np.complex64, np.float32)
        # preallocate the filtered video
        filtvideo = np.zeros_like(video)
        for idx,frame in enumerate(video):
            # Convert the input array to single precision float
            frame = frame.astype('float32')
            # From numpy array to GPUarray
            framegpu = gpuarray.to_gpu(frame)
            # Initialise output GPUarrays (R2C keeps n2//2+1 columns)
            fftframegpu = gpuarray.empty((n1,n2//2 + 1), np.complex64)
            filteredframegpu = gpuarray.empty((n1,n2), np.float32)
            # Forward FFT
            cu_fft.fft(framegpu, fftframegpu, plan_forward)
            # filter the FFT
            # linalg.multiply(maskgpu, fftframegpu, overwrite=True)
            # WORKAROUND: masking is done on the host (round trip through
            # CPU RAM) because the nvcc-compiled elementwise multiply did
            # not work; the full spectrum is rebuilt from the half-spectrum
            # using conjugate symmetry
            left = fftframegpu.get()
            if n2//2 == n2/2:
                right = np.roll(np.fliplr(np.flipud(fftframegpu.get()))[:,1:-1],1,axis=0)
            else:
                right = np.roll(np.fliplr(np.flipud(fftframegpu.get()))[:,:-1],1,axis=0)
            fftframe = np.hstack((left,right)).astype('complex64')
            ####
            fftframe = np.multiply(np.fft.fftshift(mask), fftframe).astype('complex64')
            # From numpy array to GPUarray. Take only the first n2/2+1 non redundant FFT coefficients
            fftframe = np.asarray(fftframe[:,0:n2//2 + 1], np.complex64)
            #### return back to the GPU
            fftframegpu = gpuarray.to_gpu(fftframe)
            # Backward FFT (unnormalized, hence the /n1/n2 below)
            cu_fft.ifft(fftframegpu, filteredframegpu, plan_backward)
            filtvideo[idx] = np.abs(filteredframegpu.get()/n1/n2)
        return filtvideo.astype(np.int16)
    else:
        # only one frame; exactly what is done above, but with just one frame
        frame = video
        if np.isinf(lp):
            lp = 2*np.max(np.shape(frame))
        mask = gaussian_donut(np.shape(frame), hp, lp)
        n1, n2 = mask.shape
        mask = mask.astype('complex64')
        # prepare the plans
        plan_forward = cu_fft.Plan((n1, n2), np.float32, np.complex64)
        plan_backward = cu_fft.Plan((n1, n2), np.complex64, np.float32)
        # preallocate the filtered frame
        filtframe = np.zeros_like(frame)
        # Convert the input array to single precision float
        frame = frame.astype('float32')
        # From numpy array to GPUarray
        framegpu = gpuarray.to_gpu(frame)
        # Initialise output GPUarrays
        fftframegpu = gpuarray.empty((n1,n2//2 + 1), np.complex64)
        filteredframegpu = gpuarray.empty((n1,n2), np.float32)
        # Forward FFT
        cu_fft.fft(framegpu, fftframegpu, plan_forward)
        # filter the FFT
        # linalg.multiply(maskgpu, fftframegpu, overwrite=True)
        # WORKAROUND: same host-side masking as in the video branch
        left = fftframegpu.get()
        if n2//2 == n2/2:
            right = np.roll(np.fliplr(np.flipud(fftframegpu.get()))[:,1:-1],1,axis=0)
        else:
            right = np.roll(np.fliplr(np.flipud(fftframegpu.get()))[:,:-1],1,axis=0)
        fftframe = np.hstack((left,right)).astype('complex64')
        ####
        fftframe = np.multiply(np.fft.fftshift(mask), fftframe).astype('complex64')
        # From numpy array to GPUarray. Take only the first n2/2+1 non redundant FFT coefficients
        fftframe = np.asarray(fftframe[:,0:n2//2 + 1], np.complex64)
        #### return back to the GPU
        fftframegpu = gpuarray.to_gpu(fftframe)
        # Backward FFT
        cu_fft.ifft(fftframegpu, filteredframegpu, plan_backward)
        filtframe = np.abs(filteredframegpu.get()/n1/n2)
        return filtframe.astype(np.int16)
def extract_traces_from_mat(matfile):
    """Load the ground-truth traces stored in the 'pat' variable of a
    MATLAB .mat file."""
    import scipy.io as sio
    contents = sio.loadmat(matfile)
    # the MATLAB variable name 'pat' is fixed by convention
    return contents['pat']
def extract_from_mat(matfile, var=None):
    """Extract the variable `var` (string) from the .mat file `matfile`.

    Falls back to h5py for MATLAB v7.3 (HDF5) files, where scipy's loadmat
    raises NotImplementedError. If `var` is None, print the available
    variable names and return 0.
    """
    try:
        import scipy.io as sio
        f = sio.loadmat(matfile)
    except NotImplementedError:
        # MATLAB >= v7.3 stores .mat files as HDF5
        import h5py
        f = h5py.File(matfile, 'r')
    if var is None:
        print(list(f.keys()))
        return 0
    # bug fix: removed the unreachable `return matvar[var]` that followed
    # (matvar was never defined)
    return f[var]
def plot_components_and_gt(extr_traces, extr_speckles, grtrh_traces, grtrh_speckles=None, couplings=None, outfile=None):
    """Plot extracted components next to their ground truth.

    Each row pair shows (optionally) the ground-truth footprint, the matched
    extracted footprint, and the two temporal traces.

    couplings: 'couple' -> compute matches via find_trace_couplings;
               None -> assume 1:1 ordering; otherwise a list of
               [ground_idx, extracted_idx, corr].
    outfile: if given, save the figure as PDF instead of showing it.
    """
    import seaborn as sns
    import matplotlib as mpl
    import matplotlib.gridspec as gridspec
    components = np.shape(extr_traces)[0]
    # bug fix: `couplings is 'couple'` compared string IDENTITY, which is
    # implementation-dependent (and a SyntaxWarning on Python >= 3.8)
    if couplings == 'couple':
        couplings = find_trace_couplings(grtrh_traces, extr_traces)
    elif couplings is None:
        # use 1:1 couplings
        couplings = [[idx, idx, 0] for idx in range(components)]
    colors = sns.color_palette("Set2", components)
    mpl.style.use('seaborn')
    trfig = plt.figure(figsize=(20,20))
    axgrid = gridspec.GridSpec(components*2, 20)
    for [idx, extridx, coup] in couplings:
        idx = int(idx)
        slot = idx*2
        extridx = int(extridx)
        if (grtrh_speckles is not None):
            # plot speckle ground truth
            plt.subplot(axgrid[ slot:slot+2 , 0:3])
            # cmin = np.mean(grtrh_speckles[idx])-3*np.std(grtrh_speckles[idx])
            # cmax = np.mean(grtrh_speckles[idx])+3*np.std(grtrh_speckles[idx])
            cmin = np.min(grtrh_speckles[idx])
            cmax = np.max(grtrh_speckles[idx])
            # cmap = sns.cubehelix_palette(light=1, as_cmap=True)
            plt.imshow(grtrh_speckles[idx], cmap='Greys_r', clim=[cmin, cmax])
            plt.yticks([]), plt.xticks([])
        # plot found speckle
        plt.subplot(axgrid[ slot:slot+2 , 3:6])
        # cmin = np.mean(extr_speckles[extridx])-3*np.std(extr_speckles[extridx])
        # cmax = np.mean(extr_speckles[extridx])+3*np.std(extr_speckles[extridx])
        cmin = np.min(extr_speckles[extridx])
        cmax = np.max(extr_speckles[extridx])
        # cmap = sns.cubehelix_palette(light=1, as_cmap=True)
        plt.imshow(extr_speckles[extridx], cmap='Greys_r', clim=[cmin, cmax])
        plt.yticks([]), plt.xticks([])
        # plt.text(coupl)
        # plot ground truth temporal component
        plt.subplot(axgrid[ slot, 6:-1])
        plt.plot(grtrh_traces[idx], color=colors[idx])
        plt.yticks([]), plt.xticks([])
        # plot found temporal component
        plt.subplot(axgrid[ slot+1, 6:-1])
        plt.plot(extr_traces[extridx], color=colors[idx])
        plt.yticks([]), plt.xticks([])
    plt.tight_layout()
    if outfile is None:
        plt.show()
    else:
        trfig.savefig(outfile, format='pdf', dpi=600)
        # trfig.savefig(outfile)
def plot_components(Ws, Hs):
    """Plot each component's temporal trace (columns of Ws) next to its
    spatial footprint (Hs[idx]).

    Ws: (frames, components) temporal activity
    Hs: (components, X, Y) spatial footprints

    TODO: read the shapes of Ws and Hs and iterate through them; with two
    sets show a comparison, with three use different axes, with four or more
    overlay all but the first in a shared axes.
    """
    import seaborn as sns
    import matplotlib as mpl
    import matplotlib.gridspec as gridspec
    print(np.shape(Hs)[0])
    components = int(np.shape(Hs)[0])
    colors = sns.color_palette("Set2", components)
    mpl.style.use('seaborn')
    trfig = plt.figure(figsize=(20,20))
    axgrid = gridspec.GridSpec(components, 20)
    for idx in range(components):
        plt.subplot( axgrid[ idx, 2:-1])
        plt.plot(Ws[:,idx], color=colors[idx])
        plt.yticks([]), plt.xticks([])
        # NOTE(review): axgrid[idx:idx, 0:1] is an EMPTY slice -- the
        # footprint subplot likely should use axgrid[idx, 0:1]; confirm.
        plt.subplot(axgrid[ idx:idx , 0:1])
        # clip the colormap to mean +/- 3 sigma
        cmin = np.mean(Hs[idx,:,:])-3*np.std(Hs[idx,:,:])
        cmax = np.mean(Hs[idx,:,:])+3*np.std(Hs[idx,:,:])
        cmap = sns.cubehelix_palette(light=1, as_cmap=True)
        plt.imshow(Hs[idx,:,:], cmap=cmap, clim=[cmin, cmax])
        plt.yticks([]), plt.xticks([])
    plt.show()
def rebuild_video(Ws, Hs):
    """Reconstruct a video from its factorization: video ~= Ws @ Hs.

    Ws: (frames, components) temporal activity
    Hs: (components, framesize, framesize) spatial footprints

    Returns an array of shape (frames, framesize, framesize).
    """
    # TODO: add shape-consistency checks across the two matrices
    framesize = np.shape(Hs)[-1]
    frames = np.shape(Ws)[0]
    components = np.shape(Hs)[0]
    flat_footprints = np.reshape(Hs, (components, framesize**2))
    flat_video = np.matmul(Ws, flat_footprints)
    return np.reshape(flat_video, (frames, framesize, framesize))
def reconstruction_fidelity(Ws, Hs, video):
    """Fidelity of the factorization (Ws, Hs) against `video`:
    1 - std(residual) / mean(video), with both videos min-max normalized."""
    def _minmax(v):
        shifted = v - np.min(v)
        return shifted / np.max(shifted)
    rebuilt = rebuild_video(Ws, Hs)
    residual = _minmax(rebuilt) - _minmax(video)
    # return np.sqrt(np.mean(np.square(residual)))  # RMSE alternative
    return 1 - np.std(residual) / np.mean(_minmax(video))
def plot_correlations(ax1, ax2, fig, traces_cc, footprints_cc):
    """Render the trace / footprint cross-correlation matrices as heatmaps
    on the two provided axes (with matched-width colorbars).

    traces_cc, footprints_cc: square matrices of zero-norm cross-correlation
    values in [-1, 1] (ground truth x extracted).
    """
    from mpl_toolkits.axes_grid1 import make_axes_locatable, axes_size
    # colorbar geometry: width relative to the axes height
    aspect = 20
    pad_fraction = 0.5
    # im1 = ax1.imshow(traces_cc, cmap='BuGn', clim=[0,1], interpolation = 'nearest')
    # im1 = ax1.imshow(traces_cc, cmap='BuGn', interpolation = 'nearest')
    im1 = ax1.imshow(traces_cc, cmap='BrBG', interpolation = 'nearest', clim=[-1,1])
    divider = make_axes_locatable(ax1)
    width = axes_size.AxesY(ax1, aspect=1./aspect)
    pad = axes_size.Fraction(pad_fraction, width)
    cax1 = divider.append_axes("right", size=width, pad=pad)
    cbar = fig.colorbar(im1, ax=ax1, cax=cax1)
    cbar.set_label('zero norm. cross-correlation')
    ax1.set_xlabel('extracted traces')
    ax1.set_ylabel('ground truth traces')
    ax1.set_title('traces')
    ax1.set_xticks([])
    ax1.set_yticks([])
    # same layout for the footprint correlations
    # im2 = ax2.imshow(footprints_cc, cmap='BuGn', clim=[0,1], interpolation = 'nearest')
    im2 = ax2.imshow(footprints_cc, cmap='BrBG', interpolation = 'nearest', clim=[-1,1])
    divider = make_axes_locatable(ax2)
    width = axes_size.AxesY(ax2, aspect=1./aspect)
    pad = axes_size.Fraction(pad_fraction, width)
    cax2 = divider.append_axes("right", size=width, pad=pad)
    cbar = fig.colorbar(im2, ax=ax2, cax=cax2)
    cbar.set_label('zero norm. cross-correlation')
    ax2.set_xlabel('extracted footprints')
    ax2.set_ylabel('ground truth footprints')
    ax2.set_title('footprints')
    ax2.set_xticks([])
    ax2.set_yticks([])
    fig.tight_layout()
def pvalue_stars(p):
    """Conventional significance stars for a p-value: 'ns' for p >= 0.05,
    otherwise one star per threshold crossed (0.05, 0.01, 0.001, 0.0001)."""
    if p >= 0.05:
        return 'ns'
    thresholds = (0.05, 0.01, 0.001, 0.0001)
    return '*' * sum(p < th for th in thresholds)
def build_cc_mtrxs(grtrh_traces, extr_traces, grtrh_footprints, extr_footprints, method='zncc', halfmatrix=True):
    """Build the cross-correlation matrices between ground truth and
    extracted components, after matching them with find_trace_couplings.

    method: 'zncc' (zero-norm CC for both), 'ncc' (norm CC for both) or
        'mixed' (zncc for footprints, ncc for traces).
    halfmatrix: if True only the lower triangle (incl. diagonal) is filled;
        otherwise the transposed entries are filled symmetrically.

    Returns (traces_cc, footprints_cc), both of shape (n, n) with
    row = extracted-match index, column = ground-truth index.
    """
    couplings = np.array(find_trace_couplings(grtrh_traces, extr_traces))
    # sns.set_style("white")
    footprints_cc = np.zeros((len(couplings), len(couplings)))
    traces_cc = np.zeros((len(couplings), len(couplings)))
    for idx1 in range(len(couplings)):
        for idx2 in range(idx1, len(couplings)):
            # compare ground truth idx1 against the extracted component
            # matched to ground truth idx2
            img1 = grtrh_footprints[idx1]
            img2 = extr_footprints[int(couplings[idx2][1])]
            tr1 = grtrh_traces[idx1]
            tr2 = extr_traces[int(couplings[idx2][1])]
            if method=='zncc':
                footprints_cc[idx2, idx1] = zero_norm_crosscorr(img1, img2)
                traces_cc[idx2, idx1] = zero_norm_crosscorr(tr1, tr2)
                if not halfmatrix:
                    footprints_cc[idx1, idx2] = zero_norm_crosscorr(img2, img1)
                    traces_cc[idx1, idx2] = zero_norm_crosscorr(tr2, tr1)
            elif method=='ncc':
                footprints_cc[idx2, idx1] = norm_crosscorr(img1, img2)
                traces_cc[idx2, idx1] = norm_crosscorr(tr1, tr2)
                if not halfmatrix:
                    footprints_cc[idx1, idx2] = norm_crosscorr(img2, img1)
                    traces_cc[idx1, idx2] = norm_crosscorr(tr2, tr1)
            elif method=='mixed':
                footprints_cc[idx2, idx1] = zero_norm_crosscorr(img1, img2)
                traces_cc[idx2, idx1] = norm_crosscorr(tr1, tr2)
                if not halfmatrix:
                    footprints_cc[idx1, idx2] = zero_norm_crosscorr(img2, img1)
                    traces_cc[idx1, idx2] = norm_crosscorr(tr2, tr1)
    return traces_cc, footprints_cc
<file_sep># moroUtils
random utilities for speckles and friends
|
7dfdc8a545eb0bb73c3509428d0e9596a5b74aec
|
[
"Markdown",
"Python"
] | 3 |
Python
|
m0ro/moroUtils
|
f52f4699afc2efd0e1ec6ffcd1b2660f06df0b93
|
a89a2b115733c0a2e3fbf78cec74b49932d03757
|
refs/heads/master
|
<repo_name>amirabbas-ranjbar/AngularSample<file_sep>/Amir/Content/script/app/app.js
// Root AngularJS module: Material UI, validation messages, HTML
// sanitization, a request-blocking overlay (blockUI) and REST helpers
// (ngResource).
var app = angular.module('myApp', [
    'ngMaterial',
    'ngMessages',
    'ngSanitize',
    //'ADM-dateTimePicker',
    // 'ui.bootstrap',
    'blockUI',
    'ngResource'
]);
// Global blockUI settings: no overlay message, 100 ms delay before the
// overlay appears, and any request whose URL contains "noblockui" skips the
// overlay (returning false disables blocking; all other requests fall
// through with undefined, keeping the default behaviour).
angular.module('myApp').config(function (blockUIConfig) {
    blockUIConfig.message = '';
    blockUIConfig.delay = 100;
    blockUIConfig.requestFilter = function (config) {
        if (config.url.match(/noblockui/gi)) {
            return false;
        }
    };
});
// Shared HTTP + toast helper service.
// post/get return promises that resolve with response.data and reject with
// the full response object. The toast helpers display Material toasts; the
// Persian strings are user-facing and intentionally left as-is.
app.factory('service', function ($http, $q, $mdToast) {
    return {
        post: post,
        get: get,
        showToast: showToast,
        showError: showError,
        showSuccess: showSuccess,
        show: show,
    }
    // POST `data` as JSON to `url`; resolves with the response body
    function post(url, data) {
        var deferred = $q.defer();
        $http({
            method: "POST",
            dataType: 'json',
            headers: { 'Content-Type': 'application/json; charset=utf-8' },
            data: data ? JSON.stringify(data) : null,
            url: url
        }).then(function success(response) {
            deferred.resolve(response.data);
        }, function error(response) {
            deferred.reject(response);
        });
        return deferred.promise;
    }
    // GET `url`; resolves with the response body
    function get(url) {
        var deferred = $q.defer();
        $http({
            method: "GET",
            dataType: 'json',
            headers: { 'Content-Type': 'application/json; charset=utf-8' },
            url: url
        }).then(function success(response) {
            deferred.resolve(response.data);
        }, function error(response) {
            deferred.reject(response);
        });
        return deferred.promise;
    }
    // Show a toast for a {Text, Type} message object; falls back to a
    // generic warning toast when the message is missing or malformed
    function showToast(message) {
        if (message && message.Text) {
            $mdToast.show(
                $mdToast.simple()
                    .textContent(message.Text).position('top left').theme(message.Type).hideDelay(1000)
            );
        } else {
            $mdToast.show(
                $mdToast.simple()
                    .textContent('خطای ناشناخته.').position('top left').theme('warning').hideDelay(5000)
            );
        }
    }
    // Convenience wrappers around showToast for the common themes
    function showError(text) {
        showToast({ Text: text, Type: 'error' });
    }
    function showSuccess(text) {
        showToast({ Text: text, Type: 'success' });
    }
    // Plain toast without theme/position
    function show(text) {
        $mdToast.show(
            $mdToast.simple().textContent(text).hideDelay(1000)
        );
    }
});
<file_sep>/Amir/Content/script/app/index/indexCtrl.js
// Controller for the index page: lists users and handles add / edit /
// delete through indexService. User-facing strings are Persian and left
// as-is.
app.controller('indexCtrl', function ($scope, $mdToast, $mdDialog, service, indexService) {
    var self = this;
    var myService = indexService;
    // sample/demo object (appears unused by the view logic below)
    self.test = {
        id: 1,
        name: 'ali'
    };
    // initial placeholder list; replaced by the server data below
    self.data = {
        id: 1,
        list: [
            {
                id: 1,
                name: 'ali',
                className: 'x1'
            },
            {
                id: 2,
                name: '<NAME>',
                className: 'x2'
            },
            {
                id: 3,
                name: '<NAME>',
                className: 'x3'
            }
        ]
    };
    // load the real user list from the server
    myService.getAll().then(function (data) {
        self.data.list = data;
    });
    // form model for both add and edit
    self.user = {
        Id: null,
        Name: null
    }
    // create a new user and append the persisted result to the list
    self.handleAddUser = function () {
        var param = self.user;
        myService.persist(param).then(function (data) {
            self.data.list.push(data);
            service.showSuccess('انجام شد');
        });
    }
    // open the edit panel pre-filled with the selected user
    self.handleShowEditPanel = function (item) {
        self.showEditPanelFlag = true;
        self.user.Id = item.Id;
        self.user.Name = item.Name;
    }
    // NOTE(review): this pushes the persisted user onto the list instead of
    // replacing the edited entry, so editing duplicates the row -- confirm
    // whether an in-place update was intended.
    self.handlePersistUser = function () {
        self.showEditPanelFlag = false
        var param = self.user;
        myService.persist(param).then(function (data) {
            self.data.list.push(data);
            service.showSuccess('انجام شد');
        });
    }
    // confirm dialog, then delete on the server.
    // NOTE(review): the success callback never removes the user from
    // self.data.list (the code is commented out), so the row stays visible.
    self.handleDeleteUser = function (ev, x) {
        var confirm = $mdDialog.confirm()
            .title('حذف کاربر')
            .textContent('ایا از حذف کاربر ' + x.Name + ' مطمئنید؟')
            .ariaLabel('')
            .clickOutsideToClose(true)
            .targetEvent(ev)
            .cancel('انصــراف')
            .ok('حـــذف');
        $mdDialog.show(confirm).then(function () {
            var param = { Id: x.Id };
            myService.delete(param).then(function (data) {
                // self.data.list.push(data);
                //service.showSuccess('انجام شد');
            });
        });
    }
});<file_sep>/Amir/Controllers/HomeController.cs
using Amir.Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Amir.Controllers
{
    /// <summary>
    /// Demo MVC controller serving the sample views plus a fake in-memory
    /// user "API" (GetAll / Persist / Delete) consumed by the Angular app.
    /// </summary>
    public class HomeController : Controller
    {
        public ActionResult Index()
        {
            return View();
        }
        public ActionResult About()
        {
            ViewBag.Message = "Your application description page.";
            return View();
        }
        public ActionResult Contact()
        {
            ViewBag.Message = "Your contact page.";
            return View();
        }
        /// <summary>Returns a hard-coded list of users as JSON (demo stub).</summary>
        public JsonResult GetAll()
        {
            List<User> list = new List<User>();
            list.Add(new User()
            {
                Id = 1,
                Name = "Amir"
            });
            list.Add(new User()
            {
                Id = 2,
                Name = "Rohollah"
            });
            return Json(list, JsonRequestBehavior.AllowGet);
        }
        /// <summary>
        /// Demo persist stub: echoes the user back with Id multiplied by 10
        /// (no storage involved).
        /// </summary>
        [HttpPost]
        public JsonResult Persist(User user)
        {
            user.Id = user.Id * 10;
            return Json(user, JsonRequestBehavior.AllowGet);
        }
        /// <summary>Demo delete stub: echoes the user back unchanged.</summary>
        [HttpPost]
        public JsonResult Delete(User user)
        {
            return Json(user, JsonRequestBehavior.AllowGet);
        }
    }
}
|
6cd6e5f90f8585c3493cbbc99ed9b3a7ba8aba39
|
[
"JavaScript",
"C#"
] | 3 |
JavaScript
|
amirabbas-ranjbar/AngularSample
|
6022d1079118edb4230cc40107b5a8cdc7181f9b
|
b2bd6670a67237b8215f885b651f3bbacc2d6ff4
|
refs/heads/master
|
<file_sep>from mne.channels import read_custom_montage
from mne import set_eeg_reference, events_from_annotations
from mne.epochs import Epochs
from autoreject import AutoReject, Ransac
import numpy as np
from mne.preprocessing.ica import ICA, corrmap, read_ica
import os
from pathlib import Path
from matplotlib import pyplot as plt, patches
from mne.io import read_raw_brainvision
def run_pipeline(raw, parameters, out_folder):
    """
    Do all the preprocessing steps according to the parameters. Processed data,
    log files and plots are saved in out_folder.

    Each key in `parameters` enables one step; steps 3-6 require the data to
    have been epoched (step 2).

    NOTE(review): if "epochs" is requested but "ica" is not, the final
    `return epochs, ica` raises NameError (ica is only bound inside the
    "ica" branch) -- confirm "ica" is always configured together with
    "epochs". Likewise `raw` is deleted after epoching, so only the
    no-"epochs" path can return it.
    """
    # the plotting helpers below save into this module-level folder
    global _out_folder
    _out_folder = out_folder
    if "filtering" in parameters:  # STEP1: filter the data
        print("removing power line noise...")
        raw = filtering(raw, **parameters["filtering"])
    if "epochs" in parameters:  # STEP2: epoch the data
        epochs = Epochs(raw, events_from_annotations(raw)[0],
                        **parameters["epochs"], preload=True)
        del raw
        # all other steps work on epoched data:
        if "rereference" in parameters:  # STEP3: re-reference the data
            print("computing robust average reference...")
            epochs = robust_avg_ref(epochs, parameters["rereference"])
        if "ica" in parameters:  # STEP4: remove blinks and sacchades
            epochs, ica = reject_ica(epochs, **parameters["ica"])
        if "interpolate" in parameters:  # STEP5: interpolate bad channels
            print("interpolating bad channels...")
            interpolate_bads(epochs, parameters["interpolate"])
        if "reject" in parameters:  # STEP6: epoch rejection / reparation
            print("repairing / rejecting bad epochs")
            epochs = reject_epochs(epochs, parameters["reject"])
        return epochs, ica
    else:
        return raw
def reject_epochs(epochs, autoreject_parameters):
    """Repair / reject bad epochs with autoreject and save a diagnostic
    figure (threshold histogram + cross-validation error map) to
    `_out_folder`/reject_epochs.pdf.

    autoreject_parameters: kwargs forwarded to autoreject.AutoReject.
    Returns the cleaned Epochs object.
    """
    ar = AutoReject(**autoreject_parameters, verbose="tqdm")
    # for event in epochs.event_id.keys():
    # epochs[event] = ar.fit_transform(epochs[event])
    epochs = ar.fit_transform(epochs)
    fig, ax = plt.subplots(2)
    # plot histogram of the per-channel rejection thresholds (in microvolts)
    ax[0].set_title("Rejection Thresholds")
    ax[0].hist(1e6 * np.array(list(ar.threshes_.values())), 30,
               color='g', alpha=0.4)
    ax[0].set(xlabel='Threshold (μV)', ylabel='Number of sensors')
    # plot cross validation error:
    loss = ar.loss_['eeg'].mean(axis=-1)  # losses are stored by channel type.
    im = ax[1].matshow(loss.T * 1e6, cmap=plt.get_cmap('viridis'))
    ax[1].set_xticks(range(len(ar.consensus)))
    ax[1].set_xticklabels(['%.1f' % c for c in ar.consensus])
    ax[1].set_yticks(range(len(ar.n_interpolate)))
    ax[1].set_yticklabels(ar.n_interpolate)
    # Draw rectangle at location of best parameters
    idx, jdx = np.unravel_index(loss.argmin(), loss.shape)
    rect = patches.Rectangle((idx - 0.5, jdx - 0.5), 1, 1, linewidth=2,
                             edgecolor='r', facecolor='none')
    ax[1].add_patch(rect)
    ax[1].xaxis.set_ticks_position('bottom')
    ax[1].set(xlabel=r'Consensus percentage $\kappa$',
              ylabel=r'Max sensors interpolated $\rho$',
              title='Mean cross validation error (x 1e6)')
    fig.colorbar(im)
    fig.tight_layout()
    fig.savefig(_out_folder/Path("reject_epochs.pdf"), dpi=800)
    plt.close()
    return epochs
def filtering(raw, notch=None, highpass=None, lowpass=None,
              fir_window="hamming", fir_design="firwin"):
    """
    Filter the data. Make a 2 by 2 plot with time
    series data and power spectral density before and after.

    Parameters
    ----------
    raw : mne.io.Raw
        Continuous data to filter (filtered in place).
    notch : array-like of float | None
        Frequencies for the notch filter (e.g. power-line frequency and
        harmonics). Skipped if None.
    highpass : float | None
        Lower passband edge in Hz. Skipped if None.
    lowpass : float | None
        Upper passband edge in Hz. Skipped if None.
    fir_window, fir_design : str
        FIR design options passed through to the MNE filter functions.

    Returns
    -------
    raw : mne.io.Raw
        The filtered data. A before/after PSD figure is saved to
        "remove_power_line_noise.pdf" in the module-level ``_out_folder``.
    """
    fig, ax = plt.subplots(2, sharex=True, sharey=True)
    fig.suptitle("Power Spectral Density")
    ax[0].set_title("before removing power line noise")
    ax[1].set_title("after removing power line noise")
    ax[1].set(xlabel="Frequency (Hz)", ylabel="μV²/Hz (dB)")
    ax[0].set(xlabel="Frequency (Hz)", ylabel="μV²/Hz (dB)")
    raw.plot_psd(average=True, area_mode=None, ax=ax[0], show=False)
    if notch is not None:  # notch filter at the given frequencies
        raw.notch_filter(freqs=notch, fir_window=fir_window,
                         fir_design=fir_design)
    if lowpass is not None:  # lowpass filter at the given cutoff
        raw.filter(h_freq=lowpass, l_freq=None, fir_window=fir_window,
                   fir_design=fir_design)
    if highpass is not None:  # highpass filter at the given cutoff
        raw.filter(h_freq=None, l_freq=highpass, fir_window=fir_window,
                   fir_design=fir_design)
    raw.plot_psd(average=True, area_mode=None, ax=ax[1], show=False)
    fig.tight_layout()
    fig.savefig(_out_folder/Path("remove_power_line_noise.pdf"), dpi=800)
    plt.close()
    return raw
def read_brainvision(fname, apply_montage=True, preload=False):
    """Load brainvision data. If apply_montage=True, rename the numbered
    channels to the 64-channel acticap labels and apply the montage read
    from "AS-96_REF.bvef" in the directory given by the EXPDIR environment
    variable.

    Parameters
    ----------
    fname : path-like
        Path to the BrainVision header file.
    apply_montage : bool
        Whether to rename channels and set the electrode montage.
    preload : bool
        Whether to load the data into memory immediately.

    Returns
    -------
    raw : mne.io.Raw
        The loaded (and optionally montaged) recording.
    """
    raw = read_raw_brainvision(fname, preload=preload)
    if apply_montage:
        # Map the recording's generic numeric channel names onto the
        # standard 64-channel acticap electrode labels.
        mapping = {"1": "Fp1", "2": "Fp2", "3": "F7", "4": "F3", "5": "Fz",
                   "6": "F4", "7": "F8", "8": "FC5", "9": "FC1", "10": "FC2",
                   "11": "FC6", "12": "T7", "13": "C3", "14": "Cz", "15": "C4",
                   "16": "T8", "17": "TP9", "18": "CP5", "19": "CP1",
                   "20": "CP2", "21": "CP6", "22": "TP10", "23": "P7",
                   "24": "P3", "25": "Pz", "26": "P4", "27": "P8", "28": "PO9",
                   "29": "O1", "30": "Oz", "31": "O2", "32": "PO10",
                   "33": "AF7", "34": "AF3", "35": "AF4", "36": "AF8",
                   "37": "F5", "38": "F1", "39": "F2", "40": "F6", "41": "FT9",
                   "42": "FT7", "43": "FC3", "44": "FC4", "45": "FT8",
                   "46": "FT10", "47": "C5", "48": "C1", "49": "C2",
                   "50": "C6", "51": "TP7", "52": "CP3", "53": "CPz",
                   "54": "CP4", "55": "TP8", "56": "P5", "57": "P1",
                   "58": "P2", "59": "P6", "60": "PO7", "61": "PO3",
                   "62": "POz", "63": "PO4", "64": "PO8"}
        raw.rename_channels(mapping)
        montage = read_custom_montage(
            Path(os.environ["EXPDIR"])/Path("AS-96_REF.bvef"))
        raw.set_montage(montage)
    return raw
def interpolate_bads(epochs, ransac_parameters):
    """Detect bad channels with RANSAC and interpolate them.

    Saves a before/after comparison of the evoked response (detected bad
    channels marked) to "interpolate_bad_channels.pdf" in the
    module-level ``_out_folder``.

    Parameters
    ----------
    epochs : mne.Epochs
        The epoched data to repair.
    ransac_parameters : dict
        Keyword arguments forwarded to ``autoreject.Ransac``.

    Returns
    -------
    epochs : mne.Epochs
        Epochs with the RANSAC-detected bad channels interpolated.
    """
    ransac = Ransac(**ransac_parameters, verbose="tqdm")
    evoked = epochs.average()  # for plotting (pre-interpolation average)
    epochs = ransac.fit_transform(epochs)
    evoked.info["bads"] = ransac.bad_chs_
    # plot evoked response with and without interpolated bads:
    fig, ax = plt.subplots(2)
    evoked.plot(exclude=[], axes=ax[0], show=False)
    ax[0].set_title('Before RANSAC')
    evoked = epochs.average()  # for plotting (post-interpolation average)
    evoked.info["bads"] = ransac.bad_chs_
    evoked.plot(exclude=[], axes=ax[1], show=False)
    ax[1].set_title('After RANSAC')
    fig.tight_layout()
    fig.savefig(_out_folder/Path("interpolate_bad_channels.pdf"), dpi=800)
    plt.close()
    return epochs
def robust_avg_ref(epochs, ransac_parameters, apply=True):
    """
    Create a robust average reference by first interpolating the bad channels
    to exclude outliers. The reference is applied as a projection. Return
    epochs with reference projection applied if apply=True.

    Parameters
    ----------
    epochs : mne.Epochs
        Data to re-reference; the projection is appended to its info.
    ransac_parameters : dict
        Keyword arguments forwarded to ``autoreject.Ransac``.
    apply : bool
        If True, immediately apply the appended reference projection.

    Returns
    -------
    epochs : mne.Epochs
        Epochs with the average-reference projection appended (and applied
        if ``apply`` is True).
    """
    ransac = Ransac(**ransac_parameters, verbose="tqdm")
    epochs_tmp = epochs.copy()
    # BUGFIX: interpolate on the temporary copy (previously
    # ``fit_transform(epochs)`` was called, which ignored the copy and
    # defeated the purpose of making one).
    epochs_tmp = ransac.fit_transform(epochs_tmp)
    set_eeg_reference(epochs_tmp, ref_channels="average", projection=True)
    # The average-reference projector is the one just added, i.e. the last
    # entry; index 0 would be wrong if projections already existed.
    robust_avg_proj = epochs_tmp.info["projs"][-1]
    del epochs_tmp
    epochs.info["projs"].append(robust_avg_proj)
    if apply:
        epochs.apply_proj()
    return epochs
def reject_ica(inst, reference, n_components=0.99, method="fastica",
               corr_thresh=0.9, random_state=None, plot=False):
    """Remove artifact ICA components (e.g. blinks, saccades) from the data.

    Fits an ICA decomposition to ``inst`` and excludes every component
    whose topography matches a labelled component of a reference ICA
    (template matching via ``corrmap``).

    Parameters
    ----------
    inst : mne.io.Raw | mne.Epochs
        Data to clean (modified in place by ``ica.apply``).
    reference : str | mne.preprocessing.ICA
        Reference ICA with labelled components, or a path loadable by
        ``read_ica``.
    n_components, method :
        Passed to ``mne.preprocessing.ICA``.
    corr_thresh : float
        Correlation threshold used by ``corrmap`` for template matching.
    random_state : int | None
        Seed for the ICA decomposition (for reproducibility).
    plot : bool
        Whether ``corrmap`` should plot the matched components.

    Returns
    -------
    (inst, ica)
        The cleaned data and the fitted ICA instance.
    """
    if isinstance(reference, str):
        reference = read_ica(reference)
    # BUGFIX: ``random_state`` was accepted but never forwarded to ICA,
    # so decompositions were not reproducible across runs.
    ica = ICA(n_components=n_components, method=method,
              random_state=random_state)
    ica.fit(inst)
    labels = list(reference.labels_.keys())
    components = list(reference.labels_.values())
    for component, label in zip(components, labels):
        corrmap([reference, ica], template=(0, component[0]),
                plot=plot, label=label, threshold=corr_thresh)
    # Flatten all labelled component indices into one exclusion list.
    exclude = [item for subl in list(ica.labels_.values()) for item in subl]
    ica.apply(inst, exclude=exclude)
    return inst, ica
|
b88a389f1b2d854fcc0eef3da24cd2d2c8e5bb82
|
[
"Python"
] | 1 |
Python
|
alebrrr/mne_addon
|
1b394e8ae021313db7a6bf76da48aa995f8af4ae
|
2299547c47805fe97a82ad8fd60b56ff3fde4f8d
|
refs/heads/master
|
<repo_name>RunningCalf/DDAScripts<file_sep>/Emma2DDA_Lnx.py
#!/usr/bin/python
# In stability tests, they need to treat EMMA cube as DDA mode. To do this, we need add following lines in function CDSSReportDefinition::Read(…)
# in file /COM/SourceCode/DSSReport/CDSSReportDefinition.cpp
# void CDSSReportDefinition::Read(…)
# {
# …
#  // Treat all emma cube as DDA mode
#  if (!mEmmaCubeDefnPtr.IsNull())
#  {
#      mDatasetServeMode.mValue = DssDatasetServeAsModel;
#  }
# }

import os
import sys
import re

# NOTE: this is a Python 2 script (print statement below).
# Usage: Emma2DDA_Lnx.py <path-to-CDSSReportDefinition.cpp>
if len(sys.argv) < 2:
    print len(sys.argv)
    sys.exit(1)

srcFileName = sys.argv[1]
srcFile = open(srcFileName, 'r')
targetFile = open("srcTemp", 'w')

# State for locating the body of CDSSReportDefinition::Read by brace counting.
isFuncStart = False
isFuncEnd = False
isFuncNameFound = False
braceCnt = 0

for line in srcFile.readlines():
    # Find the line carrying the target function's signature.
    # NOTE(review): if '{' sits on the same line as the signature, it is
    # counted here AND in the block below (double count) -- confirm the
    # target file always opens the brace on its own line.
    if not isFuncNameFound and re.search('void\s+CDSSReportDefinition::Read\(', line):
        isFuncNameFound = True
        if re.search('{', line):
            braceCnt += 1
            isFuncStart = True
    if isFuncNameFound and not isFuncEnd:
        # Track nesting depth; depth back at zero means the body ended.
        if re.search('{', line):
            braceCnt += 1
            isFuncStart = True
        if re.search('}', line):
            braceCnt -= 1
        if braceCnt == 0 and isFuncStart and not isFuncEnd:
            isFuncEnd = True
            index = line.rfind('}')
            # Inject the DDA-forcing snippet just before the closing brace.
            if index == 0:
                line = '\n\tif (!mEmmaCubeDefnPtr.IsNull())\n\t{\n\t\tmDatasetServeMode.mValue = DssDatasetServeAsModel;\n\t}\n' + line
            else:
                # NOTE(review): line[:index-1] also drops the character just
                # before '}' -- looks like an off-by-one; confirm intent.
                line = line[:index-1] + '\n\tif (!mEmmaCubeDefnPtr.IsNull())\n\t{\n\t\tmDatasetServeMode.mValue = DssDatasetServeAsModel;\n\t}\n' + line[index:]
    targetFile.write(line)

srcFile.close()
targetFile.close()
# Replace the original source file with the patched copy.
os.system('mv -f srcTemp ' + srcFileName)
<file_sep>/DDA_Dbg_Build.sh
#!/bin/bash
# This scripts is used to compile target project /COM/Projects/ObjectServer/
# But before doing that we need to compile /COM/IDL first to generate some header files.

# Step 1: run the IDL build in the debug view to generate required headers.
# Generate DSSCOMMaster/DSSCOMMaster_i.h
echo "========================================="
echo "Generate Header Files"
echo "========================================="
cd /home/cp2/views/cp2_DDA_lnx_dbg_view/COM/IDL
../../BuildScripts/one.pl -one -notest
make -f MakefileLinux -B

# Step 2: build the ObjectServer project itself.
# Compile target project.
echo "========================================="
echo "Build ObjectServer Project"
echo "========================================="
cd /home/cp2/views/cp2_DDA_lnx_dbg_view/COM/Projects/ObjectServer/
../../../BuildScripts/one.pl -one -notest
make -f MakefileLinux
<file_sep>/DDA_Rel_Build.sh
#!/bin/bash
# This scripts is used to compile target project /COM/Projects/ObjectServer/
# But before doing that we need to compile /COM/IDL first to generate some header files.

# Step 1: run the IDL build in the release view to generate required headers.
# Generate DSSCOMMaster/DSSCOMMaster_i.h
echo "========================================="
echo "Generate Head Files"
echo "========================================="
cd /home/cp2/views/cp2_DDA_lnx_rel_view/COM/IDL
../../BuildScripts/one.pl -m Release -one -notest
make -f MakefileLinux -B

# Step 2: build the ObjectServer project in Release mode.
echo "========================================="
echo "Build ObjectServer Project"
echo "========================================="
cd /home/cp2/views/cp2_DDA_lnx_rel_view/COM/Projects/ObjectServer/
../../../BuildScripts/one.pl -m Release -one -notest
make -f MakefileLinux
<file_sep>/README.md
# DDAScripts
File Description:
For Linux:
sync_d.sh : Code updating and binaries copying for Linux debug view;
sync_r.sh : Code updating and binaries copying for Linux release view;
DDA_Dbg_Build.sh: Build target project(ObjectServer) for Linux debug view;
DDA_Rel_Build.sh: Build target project(ObjectServer) for Linux release view;
Emma2DDA_Lnx.py : Modify code to force to treat all report as DDA;
For Windows:
sync_dbg_Win.bat: Code updating and binaries copying for Windows debug view;
sync_rel_Win.bat: Code updating and binaries copying for Windows release view;
Emma2DDA_Win.py : Modify code to force to treat all report as DDA.
<file_sep>/sync_r.sh
#! /bin/bash
# Sync the Linux release view to a given build and copy its binaries in.
# Usage: BuildNum=<build-number> ./sync_r.sh [<extra-cc-user>]

mstr="<EMAIL>"   # mail-address suffix used for the notification below

# Must run as the cp2 account that owns the ClearCase views.
if [[ `whoami` != 'cp2' ]] ; then
    echo "run it with the cp2 account"
    exit 1
fi

# The build number is passed via the BuildNum environment variable.
if [[ -z ${BuildNum} ]]; then
    echo "please provide a build number";
    exit 1
fi

#if pwd | grep -q rel_view ; then
mode="rel"
view="/home/cp2/views/cp2_DDA_lnx_rel_view"
copyto="/user4/Builds/${BuildNum}/RELEASE/BIN/copyto.pl"
#fi

echo "cd $view"
cd $view

# Refresh the config-spec directory so the build's cs file is available.
echo "cleartool update -over BuildScripts/conspecs"
cleartool update -over BuildScripts/conspecs
if [ ! -f BuildScripts/conspecs/${BuildNum}_cs.txt ]; then
    echo "cs file for build $BuildNum not found"
    echo `pwd`/BuildScripts/conspecs/${BuildNum}_cs.txt
    exit 1
fi

# Build a fresh config spec: the build's spec minus its load rules, plus
# the load rules of the current view.
cs="cs.txt"
if [ -f $cs ] ; then
    echo "rm -f $cs"
    rm -f $cs
fi
echo "cat BuildScripts/conspecs/${BuildNum}_cs.txt | grep -v load > $cs"
cat BuildScripts/conspecs/${BuildNum}_cs.txt | grep -v load > $cs
echo "cleartool catcs | grep load >> $cs"
cleartool catcs | grep load >> $cs

#unco checkout before update
echo "cleartool lsco -cview -me -s -avobs"
co_files=`cleartool lsco -cview -me -s -avobs`
if [ ! -z "$co_files" ]; then
    echo "cleartool unco"
    cleartool unco -rm $co_files
else
    echo "No checkouts."
fi

# Apply the assembled config spec to the view.
echo "cleartool setcs -over $cs"
cleartool setcs -over $cs

# Copy the build's binaries into the view using the build's copyto script.
if [ ! -f $copyto ]; then
    echo "File $copyto not found"
    exit 1
fi
echo "perl $copyto . "
yes | perl $copyto .

# Assemble notification addresses (the mail command itself is disabled).
to="jingwang$mstr"
if [[ ! -z $2 ]]; then
    cc="-c $2$mstr"
fi
from="jingwang$mstr"
#echo -e "View $view is synced to build ${BuildNum} on $(uname -n). \n\nThis is an automated email." | mail -s "Delta $mode view synced to ${BuildNum} on $(uname -n)" $to $cc -- -f $from
|
7fda204a6af5dfeac933f437c090cf41db07c3e8
|
[
"Markdown",
"Python",
"Shell"
] | 5 |
Python
|
RunningCalf/DDAScripts
|
a10b6072e4d50c0606abe9ed395c7ddf34842945
|
e433f10c21c80c6335d877aa79ed63eb7a2b3d6c
|
refs/heads/master
|
<repo_name>Egledze/AutomatinioTestavimoKursai201130<file_sep>/ManoBaigiamasisProjektas/ManoTestai/ManoTestai02.cs
using AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai;
using NUnit.Framework;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoTestai
{
// Tests for the pigu.lt "disinfection / special protective equipment" section.
class ManoTestai02 : BazineManoTestu
{
    // Page object wrapping the protective-equipment pages.
    private ManoPuslapis02 manoPuslapis02;

    [SetUp]
    public void PriesKiekvienaFolderioTesta()
    {
        // Before each test: open the disinfection department and the
        // special protective equipment category.
        manoPuslapis02 = new ManoPuslapis02(driver);
        manoPuslapis02.PaspauskMygtukaDezinfekcinesPriemones();
        manoPuslapis02.PaspauskLangeliSpecApsaugosPriemones();
    }

    // Navigate to the masks/respirators category and verify the page heading.
    [Test]
    public void SuraskPreke()
    {
        manoPuslapis02.PaspauskLangeliKaukesRespiratoriai();
        manoPuslapis02.PatikrinkArSurado();
        //https://pigu.lt/lt/apsaugos-dezinfekcines-priemones/specialios-apsaugos-priemones/kaukes-respiratoriai
    }

    // Add two products (masks and a respirator) to the cart and verify the
    // cart page is reached.
    [Test]
    public void SudėkIKrepseli()
    {
        manoPuslapis02.PaspauskLangeliKaukesRespiratoriai();
        manoPuslapis02.IdekIKrepseliJuodasKaukes();
        manoPuslapis02.IdekIKrepseliRespiratoriu();
        manoPuslapis02.PirkPrekes();
        manoPuslapis02.PatikrinkArJauKrepselyje();
    }

    // Open a product page, try to ask a question, and verify the message that
    // asking requires a registered account.
    [Test]
    public void UzduokKlausimaApiePreke()
    {
        manoPuslapis02.PaspauskLangeliSkydeliaiIrAkiniai();
        manoPuslapis02.AtverskInfoApieApsauginiSkydeli();
        manoPuslapis02.UzduokKlausima();
        manoPuslapis02.PatikrinkArGaliUzduotiKlausima();
    }
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoTestai/ManoTestai03.cs
using AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai;
using NUnit.Framework;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoTestai
{
// Tests around the "VERO COFFEE HOUSE" seller shop on pigu.lt.
class ManoTestai03 : BazineManoTestu
{
    // Page object wrapping the seller-shop pages.
    private ManoPuslapis03 manoPuslapis03;

    [SetUp]
    public void PriesKiekvienaFolderioTesta()
    {
        // Before each test: navigate to the VERO COFFEE HOUSE shop page.
        manoPuslapis03 = new ManoPuslapis03(driver);
        manoPuslapis03.PaspauskPrekybosCentras();
        manoPuslapis03.PaspauskSuzinotiDaugiau();
        manoPuslapis03.PasirinkParduotuvePagalRaideV();
        manoPuslapis03.PaspauskVeroKoffeeHouse();
    }

    // Open the ratings/comments tab and verify the reviews heading.
    [Test]
    public void ArEsuTinkamamePuslapyje()
    {
        manoPuslapis03.PaspauskIvertinimaiIrKomentarai();
        manoPuslapis03.PatikrinkArEsuAtsiliepimuPuslapyje();
    }

    // Try to add a product to the wish list; as an anonymous visitor this
    // is expected to land on the login page.
    [Test]
    public void ArIdejoPrekeINoruSarasa()
    {
        manoPuslapis03.PaspauskPerkamiausiosPrekes();
        //manoPuslapis03.IsskleiskMeniu(); // no longer needed
        manoPuslapis03.PasirinkGeriausiaiIvertintos();
        manoPuslapis03.PaspauskKavosPupelesBrazil();
        manoPuslapis03.PaspauskItrauktiINoruSarasa();
        manoPuslapis03.PatikrinkArIdejoINoruSarasa();
    }

    // Add a product to the comparison list and verify the comparison page opens.
    [Test]
    public void ArIdejoPrekeIPalyginimoLanga()
    {
        manoPuslapis03.PaspauskKavosPupelesHola();
        manoPuslapis03.PaspauskItrauktiIPalyginima();
        manoPuslapis03.LygintiPrekes();
        manoPuslapis03.PatikrinkArEsiPalyginimoLange();
    }
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoPuslapiai/ManoPuslapis01.cs
using NUnit.Framework;
using OpenQA.Selenium;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai
{
// Page object for the pigu.lt login form.
class ManoPuslapis01 : BazineManoPuslapiu
{
    public ManoPuslapis01(IWebDriver driver) : base(driver) { }

    // Login-form element locators.
    private IWebElement ElAdresoLaukas => driver.FindElement(By.Name("email"));
    private IWebElement SlaptazodzioLaukas => driver.FindElement(By.Name("password"));
    private IWebElement PrisijungimoMygtukas => driver.FindElement(By.Name("login"));

    // Navigate directly to the login form.
    public void NueikIprisijungimoForma()
    {
        driver.Url = ("https://pigu.lt/lt/u/login");
    }

    // Type an invalid e-mail address into the e-mail field.
    public void IrasykElAdresaBloga(string IrasomasBlogasElAdresas)
    {
        ElAdresoLaukas.SendKeys(IrasomasBlogasElAdresas);
    }

    // Type a valid e-mail address into the e-mail field.
    public void IrasykElAdresaGera(string IrasomasGerasElAdresas)
    {
        ElAdresoLaukas.SendKeys(IrasomasGerasElAdresas);
    }

    // Type an invalid password into the password field.
    public void IrasykSlaptazodiBloga(string IrasomasBlogasSlaptazodis)
    {
        SlaptazodzioLaukas.SendKeys(IrasomasBlogasSlaptazodis);
    }

    // Type a valid password into the password field.
    public void IrasykSlaptazodiGera(string IrasomasGerasSlaptazodis)
    {
        SlaptazodzioLaukas.SendKeys(IrasomasGerasSlaptazodis);
    }

    // Submit the login form.
    public void PaspauskPrisijungimoMygtuka()
    {
        PrisijungimoMygtukas.Click();
    }

    // Assert the "invalid e-mail address" validation message is shown.
    public void PatikrinkZinutePirma()
    {
        Assert.AreEqual("Klaidingai įvedėte el. pašto adresą", driver.FindElement(By.CssSelector(".has-icon > .error-message")).Text);
    }

    // Assert the "wrong password" validation message is shown.
    public void PatikrinkZinuteAntra()
    {
        Assert.AreEqual("Neteisingas slaptažodis", driver.FindElement(By.CssSelector(".error-message:nth-child(2)")).Text);
    }

    // Assert the logged-in user's name appears in the header widget.
    public void PatikrinkArPrisijunge()
    {
        Assert.AreEqual("Eglė", driver.FindElement(By.CssSelector(".visitor-login .inner > .text")).Text);
    }
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoPuslapiai/BazineManoPuslapiu.cs
using System;
using System.Collections.Generic;
using System.Text;
using OpenQA.Selenium;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai
{
// Base class for all page objects: holds the shared WebDriver instance.
public class BazineManoPuslapiu
{
    // Driver supplied by the owning test; available to derived page objects.
    protected IWebDriver driver;

    public BazineManoPuslapiu(IWebDriver driver)
    {
        this.driver = driver;
    }
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoPuslapiai/ManoPuslapis02.cs
using System.Threading;
using NUnit.Framework;
using OpenQA.Selenium;
using OpenQA.Selenium.Interactions;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai
{
// Page object for the protective-equipment pages: category navigation,
// adding products to the cart, and the product Q&A flow.
class ManoPuslapis02 : BazineManoPuslapiu
{
    public ManoPuslapis02(IWebDriver driver) : base(driver) { }

    // Locators for category tiles, product tiles and action buttons.
    private IWebElement DezinfekciniuPriemoniuMygtukas => driver.FindElement(By.CssSelector("#department-11345 .text"));
    private IWebElement SpecApsaugosPriemoniuLangelis => driver.FindElement(By.CssSelector(".category-list-item-wrap:nth-child(2) img"));
    private IWebElement KaukiuRespiratoriuLangelis => driver.FindElement(By.CssSelector(".category-list-item-wrap:nth-child(1) img"));
    private IWebElement MygtukasSurastiJuodasKaukes => driver.FindElement(By.CssSelector("[title='Vienkartinės veido kaukės 3-jų sluoksnių juodos spalvos (50 vnt.)']"));
    private IWebElement MygtukasDetiIKrepseliJuodasKaukes => driver.FindElement(By.XPath("(//a[contains(text(),'Į krepšelį')])[4]"));
    private IWebElement MygtukasUzdarytiJuodasKaukes => driver.FindElement(By.CssSelector("#modal .close-modal"));
    private IWebElement MygtukasSurastiRespiratoriu => driver.FindElement(By.CssSelector("[title='Veido kaukė 4-ių sluoksnių, 2vnt']"));
    private IWebElement MygtukasDetiIKrepseliRespiratoriu => driver.FindElement(By.CssSelector("[widget-attachpoint='addToCart']")); //(By.XPath("//*[@id='productBlock31166886']/div/div/a[2]"));
    private IWebElement MygtukasUzdarytiRespiratoriu => driver.FindElement(By.Id("close"));
    private IWebElement MygtukasPrekiuKrepselis => driver.FindElement(By.CssSelector("#cartWidget > a > div > div > span.text"));
    private IWebElement LangelisSkydeliaiIrAkiniai => driver.FindElement(By.CssSelector(".category-list-item-wrap:nth-child(5) img"));
    private IWebElement LangelisApsauginisSkydelisVeidui => driver.FindElement(By.CssSelector("#productBlock30883851 > .heightResponse a > img"));
    private IWebElement MygtukasKlausimaiIrAtsakymai => driver.FindElement(By.Id("question_answerTab"));
    private IWebElement MygtukasUzduotiKlausima => driver.FindElement(By.CssSelector("[widget-id='question_answer']"));
    // Message shown when an anonymous visitor tries to ask a product question.
    private readonly string TekstasApieKlausimus = "Užduoti klausimus gali tik registruoti Pigu.lt nariai. Prašome prisijungti arba registruotis";

    // First test:
    // Open the disinfection department.
    public void PaspauskMygtukaDezinfekcinesPriemones()
    {
        DezinfekciniuPriemoniuMygtukas.Click();
    }

    // Open the special protective equipment category.
    public void PaspauskLangeliSpecApsaugosPriemones()
    {
        SpecApsaugosPriemoniuLangelis.Click();
    }

    // Open the masks/respirators category.
    public void PaspauskLangeliKaukesRespiratoriai()
    {
        KaukiuRespiratoriuLangelis.Click();
        Thread.Sleep(5000);
    }

    // Assert the masks/respirators category heading is displayed.
    public void PatikrinkArSurado()
    {
        Assert.AreEqual("Kaukės, respiratoriai", driver.FindElement(By.XPath("//h1[contains(.,'Kaukės, respiratoriai')]")).Text);
    }

    // Second test:
    // Hover over the black-masks product tile and add it to the cart,
    // then close the confirmation modal.
    public void IdekIKrepseliJuodasKaukes()
    {
        Actions builder = new Actions(driver);
        builder.MoveToElement(MygtukasSurastiJuodasKaukes).Build().Perform();
        MygtukasDetiIKrepseliJuodasKaukes.Click();
        Thread.Sleep(3000);
        MygtukasUzdarytiJuodasKaukes.Click();
        Thread.Sleep(3000);
    }

    // Open the respirator product and add it to the cart, then close the modal.
    public void IdekIKrepseliRespiratoriu()
    {
        Actions builder = new Actions(driver);
        builder.MoveToElement(MygtukasSurastiRespiratoriu).Build().Perform();
        MygtukasSurastiRespiratoriu.Click();
        Thread.Sleep(3000);
        MygtukasDetiIKrepseliRespiratoriu.Click();
        Thread.Sleep(3000);
        MygtukasUzdarytiRespiratoriu.Click();
    }

    // Open the shopping cart via the header widget.
    public void PirkPrekes()
    {
        MygtukasPrekiuKrepselis.Click();
        //driver.Url = ("https://pigu.lt/lt/cart");
    }

    // Assert the cart page heading is displayed.
    public void PatikrinkArJauKrepselyje()
    {
        Thread.Sleep(4000);
        Assert.AreEqual("Prekių krepšelis", driver.FindElement(By.CssSelector(".current > .title")).Text);
        // Assert.AreEqual("https://pigu.lt/lt/cart", driver.Url);
    }

    // Third test:
    // Open the face shields / goggles category.
    public void PaspauskLangeliSkydeliaiIrAkiniai()
    {
        LangelisSkydeliaiIrAkiniai.Click();
        Thread.Sleep(3000);
    }

    // Open the face-shield product details page.
    public void AtverskInfoApieApsauginiSkydeli()
    {
        Thread.Sleep(2000);
        LangelisApsauginisSkydelisVeidui.Click();
    }

    // Open the Q&A tab and click "ask a question".
    public void UzduokKlausima()
    {
        Thread.Sleep(2000);
        MygtukasKlausimaiIrAtsakymai.Click();
        Thread.Sleep(2000);
        MygtukasUzduotiKlausima.Click();
    }

    // Assert the "registered members only" message is shown.
    public void PatikrinkArGaliUzduotiKlausima()
    {
        Thread.Sleep(3000);
        Assert.AreEqual(TekstasApieKlausimus, driver.FindElement(By.CssSelector("h2:nth-child(2)")).Text);
        //Assert.AreEqual("https://pigu.lt/lt/namu-remontas/darbo-apranga/galvos-pasauga/apsauginis-skydelis-veidui?id=30883851", driver.Url);
    }
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoPuslapiai/ManoPuslapis03.cs
using System;
using System.Threading;
using NUnit.Framework;
using OpenQA.Selenium;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai
{
// Page object for the VERO COFFEE HOUSE seller shop: reviews tab,
// wish list, and product comparison.
class ManoPuslapis03 : BazineManoPuslapiu
{
    public ManoPuslapis03(IWebDriver driver) : base(driver) { }

    // Locators for shop navigation, product tiles and action buttons.
    private IWebElement MygtukasPrekybosCentras => driver.FindElement(By.CssSelector("li:nth-child(2) > .grey-ab-main-menu"));
    private IWebElement MygtukasSuzinotiDaugiau => driver.FindElement(By.XPath("//div[@id='731block']/div/div/a/div/div/div"));
    private IWebElement VeroKoffeeHouseLangelis => driver.FindElement(By.CssSelector(".category-list-item-wrap:nth-child(4) img"));
    private IWebElement IvertinimaiIrKomentarai => driver.FindElement(By.CssSelector(".nav > li:nth-child(2) > a"));
    // Expected heading of the seller's reviews page.
    private readonly String Tekstas = "Klientų atsiliepimai apie pardavėjo VERO COFFEE HOUSE prekes";
    private IWebElement LangelisPerkamiausiosPrekes => driver.FindElement(By.CssSelector(".select-target > b"));
    // NOTE(review): empty selector — only used by the unused IsskleiskMeniu(),
    // which would throw if called.
    private IWebElement PaskrolinkZemyn => driver.FindElement(By.CssSelector(""));
    private IWebElement MygtukasGeriausiaiIvertintos => driver.FindElement(By.CssSelector("[data-value='rs']"));
    private IWebElement PaveiksliukasKavosPupelesBrazil => driver.FindElement(By.CssSelector("#productBlock29554500 > div > div > div:nth-child(3) > a > img"));
    private IWebElement MygtukasItrauktiINoruSarasa => driver.FindElement(By.CssSelector("#productPage > section:nth-child(3) > div.site-block > div.clearfix.detailed-product-top > div.product-info-options > div.add-product-box.sticked > div.mt10 > div.more-btns.fl.one-row-buttons > div:nth-child(1) > div > a > span"));
    private IWebElement PaveiksliukasKavosPupelesHola => driver.FindElement(By.CssSelector("#productBlock32731201 > .heightResponse a > img"));
    private IWebElement MygtukasItrauktiIPalyginima => driver.FindElement(By.CssSelector(".col-1-of-2 > .btn > span"));
    private IWebElement LangelisLygintiPrekes => driver.FindElement(By.CssSelector("#compare > i:nth-child(2)"));

    // First test:
    // Open the shopping-centre section from the main menu.
    public void PaspauskPrekybosCentras()
    {
        MygtukasPrekybosCentras.Click();
    }

    // Click the "learn more" banner.
    public void PaspauskSuzinotiDaugiau()
    {
        MygtukasSuzinotiDaugiau.Click();
    }

    // Jump straight to the shop listing for letter "V".
    public void PasirinkParduotuvePagalRaideV()
    {
        driver.Url = ("https://pigu.lt/lt/parduotuves/v");
    }

    // Open the VERO COFFEE HOUSE shop tile.
    public void PaspauskVeroKoffeeHouse()
    {
        VeroKoffeeHouseLangelis.Click();
    }

    // Open the ratings/comments tab.
    public void PaspauskIvertinimaiIrKomentarai()
    {
        IvertinimaiIrKomentarai.Click();
    }

    // Assert the reviews page heading is displayed.
    public void PatikrinkArEsuAtsiliepimuPuslapyje()
    {
        Assert.AreEqual(Tekstas, driver.FindElement(By.XPath("//div[@id='ratingJump']/h1")).Text);
        // Assert.AreEqual("https://pigu.lt/lt/parduotuve/vero-coffee-house/atsiliepimai", driver.Url);
    }

    // Second test:
    // Open the sort-order dropdown ("best selling" by default).
    public void PaspauskPerkamiausiosPrekes()
    {
        LangelisPerkamiausiosPrekes.Click();
    }

    // Unused helper (see NOTE on PaskrolinkZemyn above).
    public void IsskleiskMeniu()
    {
        Thread.Sleep(3000);
        PaskrolinkZemyn.Click();
    }

    // Sort products by best rating.
    public void PasirinkGeriausiaiIvertintos()
    {
        //Thread.Sleep(1000);
        MygtukasGeriausiaiIvertintos.Click();
    }

    // Open the "Brazil" coffee-beans product page.
    public void PaspauskKavosPupelesBrazil()
    {
        Thread.Sleep(2000);
        PaveiksliukasKavosPupelesBrazil.Click();
    }

    // Click "add to wish list" on the product page.
    public void PaspauskItrauktiINoruSarasa()
    {
        Thread.Sleep(3000);
        MygtukasItrauktiINoruSarasa.Click();
    }

    // Assert that the anonymous visitor was redirected to the login page.
    public void PatikrinkArIdejoINoruSarasa()
    {
        Thread.Sleep(3000);
        Assert.AreEqual("https://pigu.lt/lt/u/login", driver.Url);
        //Assert.AreEqual("Prisijungti", driver.FindElement(By.Name("login")).Text);
    }

    // Third test:
    // Open the "Hola" coffee-beans product page.
    public void PaspauskKavosPupelesHola()
    {
        Thread.Sleep(2000);
        PaveiksliukasKavosPupelesHola.Click();
    }

    // Click "add to comparison" on the product page.
    public void PaspauskItrauktiIPalyginima()
    {
        MygtukasItrauktiIPalyginima.Click();
    }

    // Open the comparison widget.
    public void LygintiPrekes()
    {
        LangelisLygintiPrekes.Click();
    }

    // Assert the comparison page URL was reached.
    public void PatikrinkArEsiPalyginimoLange()
    {
        Thread.Sleep(3000);
        Assert.AreEqual("https://pigu.lt/lt/products/compare/index/2831", driver.Url);
        //Assert.AreEqual("Palyginkite prekes", driver.FindElement(By.CssSelector(".page-title")).Text);
    }

    /*
    public void IrasykElAdresaGera("<EMAIL>")
    {
    }
    public void IrasykSlaptazodiGera("Pigult2020")
    {
        Thread.Sleep(5000);
    }
    public void PaspauskPrisijungimoMygtuka()
    {
        Thread.Sleep(5000);
    }
    public void PatikrinkArPrisijunge()
    {
    }
    */
}
}
<file_sep>/ManoBaigiamasisProjektas/ManoTestai/ManoTestai01.cs
using System.Threading;
using AutoPaskaitos.ManoBaigiamasisProjektas.ManoPuslapiai;
using NUnit.Framework;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoTestai
{
// Tests for the pigu.lt login form: invalid e-mail, wrong password, success.
class ManoTestai01 : BazineManoTestu
{
    // Page object wrapping the login form.
    private ManoPuslapis01 manoPuslapis01;

    [SetUp]
    public void PriesKiekvienaKlasesTesta()
    {
        // Before each test: open the login form.
        manoPuslapis01 = new ManoPuslapis01(driver);
        manoPuslapis01.NueikIprisijungimoForma();
    }

    // Malformed e-mail address: expect the e-mail validation message.
    [Test]
    public void BlogasElAdresas()
    {
        manoPuslapis01.IrasykElAdresaBloga("egle");
        manoPuslapis01.IrasykSlaptazodiBloga("Pigu");
        manoPuslapis01.PaspauskPrisijungimoMygtuka();
        manoPuslapis01.PatikrinkZinutePirma();
    }

    // Valid e-mail but wrong password: expect the password error message.
    [Test]
    public void BlogasSlaptazodis()
    {
        manoPuslapis01.IrasykElAdresaGera("<EMAIL>");
        manoPuslapis01.IrasykSlaptazodiBloga("Pigu");
        manoPuslapis01.PaspauskPrisijungimoMygtuka();
        manoPuslapis01.PatikrinkZinuteAntra();
    }

    // Valid credentials: expect the user's name in the header after login.
    [Test]
    public void Prisijungti()
    {
        manoPuslapis01.IrasykElAdresaGera("<EMAIL>");
        manoPuslapis01.IrasykSlaptazodiGera("Pigult2020");
        Thread.Sleep(5000);
        manoPuslapis01.PaspauskPrisijungimoMygtuka();
        Thread.Sleep(5000);
        manoPuslapis01.PatikrinkArPrisijunge();
    }
}
}<file_sep>/ManoBaigiamasisProjektas/ManoTestai/BazineManoTestu.cs
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using OpenQA.Selenium;
using OpenQA.Selenium.Chrome;
using System;
namespace AutoPaskaitos.ManoBaigiamasisProjektas.ManoTestai
{
// Base class for all tests: creates and disposes the browser session.
class BazineManoTestu
{
    // Shared driver instance used by tests and page objects.
    public IWebDriver driver;

    [SetUp]
    public void PriesKiekvienaTesta()
    {
        // Fresh maximized Chrome session at pigu.lt with a 10 s implicit wait.
        driver = new ChromeDriver();
        driver.Manage().Window.Maximize();
        driver.Url = "https://pigu.lt/";
        driver.Manage().Timeouts().ImplicitWait = TimeSpan.FromSeconds(10);
    }

    [TearDown]
    public void PoKiekvienoTesto()
    {
        // Close the browser after each test.
        driver.Quit();
    }
}
}
|
b46123f8e9b3b61f7e090d25dc32b6d964f20902
|
[
"C#"
] | 8 |
C#
|
Egledze/AutomatinioTestavimoKursai201130
|
7d6ebbfbe643cc79a81f496fb14579d12d788ae6
|
1d4e86798bf412f7df437e541604f7fe483a586b
|
refs/heads/master
|
<file_sep>#!/bin/bash
# Build and launch an OSRM routing server instance for a given profile.
# Usage: build-osrm-config profile.lua port
# Creates /opt/<profile-name>/ with a copy of Project-OSRM, extracts and
# prepares the reference map with the given profile, writes server.ini and
# starts osrm-routed in the background (pid file in the instance dir).

die () {
    echo "$@"
    exit 1
}

if [ "$#" -ne 2 ]
then
    die "Usage: build-osrm-config profile.lua port"
fi

# Fetch Project-OSRM (pinned to v0.3.5) on first run.
if [ ! -d /opt/Project-OSRM ]
then
    cd /opt || die cant cd to /opt
    git clone https://github.com/DennisOSRM/Project-OSRM.git
    # BUGFIX: the checkout must run inside the cloned repository; it was
    # previously executed in /opt, which is not a git work tree.
    cd /opt/Project-OSRM || die cant cd to /opt/Project-OSRM
    git checkout -b v0.3.5 v0.3.5
fi

filename=${1##*/}
soloname=${filename%.*}
dirname="/opt/${soloname}"

# Stop a previously running instance, if any. Guard against a missing pid
# file so a first run does not spew cat/kill errors.
pidfile="/opt/${soloname}/osrm-routed.pid"
if [ -f "$pidfile" ]
then
    echo "killing server instance running...."
    kill -9 $(cat "$pidfile")
    while kill -0 $(cat "$pidfile") 2>/dev/null; do
        echo "killing server instance waiting to shutdown...."
        sleep 0.5
    done
fi

echo create dir $dirname
mkdir -pv $dirname || die cant create directory $dirname
cd $dirname || die cant cd to $dirname
builddir=$dirname/build
cp -a /opt/Project-OSRM/* $dirname/ || die
mkdir -pv $builddir || die
cd $builddir
cp -av /opt/Project-OSRM/profiles $builddir/ || die
cp -av /opt/maps/ref-map.osm.bz2 $builddir/map.osm.bz2 || die
cp -av $1 $builddir/profile.lua || die
$builddir/osrm-extract map.osm.bz2
$builddir/osrm-prepare map.osrm

# Generate the server configuration pointing at the prepared map files.
cat <<EOF > server.ini || die
Threads = 7
IP = 0.0.0.0
Port = $2
hsgrData=${builddir}/map.osrm.hsgr
nodesData=${builddir}/map.osrm.nodes
edgesData=${builddir}/map.osrm.edges
ramIndex=${builddir}/map.osrm.ramIndex
fileIndex=${builddir}/map.osrm.fileIndex
namesData=${builddir}/map.osrm.names
timestamp=${builddir}/map.osrm.timestamp
EOF

# Launch the routing daemon in the background and record its pid.
nohup 2>&1 $builddir/osrm-routed & echo $! > /opt/${soloname}/osrm-routed.pid | logger -t osrm-indus
exit 0<file_sep>noway.io
========
No Way is a route planner suitable for people with various disabilities. It is still a prototype, created at Hackcess, a hackathon around accessibility organized by Transilien, where No Way received an award.
Apache License
Copyright 2014 [IOdump](http://iodump.io) and <NAME>
|
d7c77524bbb8df665b9e48a8d780ea078f27ed67
|
[
"Markdown",
"Shell"
] | 2 |
Shell
|
IOdump/noway.io
|
a8e7d591d61951f9882232a912d6b205f93ae59c
|
1a877e3432f19a82b85cf50426fa7224ecd30653
|
refs/heads/master
|
<repo_name>thiwwy/lotto_service_system<file_sep>/src/main/java/lottoApi/service/LottoService.java
package lottoApi.service;
import lottoApi.model.LottoExtraction;
import lottoApi.model.LottoWinner;
import java.util.List;
/**
 * Read-only access to lotto extractions (draws) and their winners.
 */
public interface LottoService {

    /** @return all known lotto extractions */
    List<LottoExtraction> getLottos();

    /** @return all known lotto winners */
    List<LottoWinner> getLottoWinners();

    /**
     * @param id identifier of the extraction
     * @return the extraction with the given id
     */
    LottoExtraction getLottoById(long id);

    /**
     * @param id identifier of the winner
     * @return the winner with the given id
     */
    LottoWinner getWinnerById(long id);
}
<file_sep>/README.md
# lotto_service_system
Java API for a lotto service, built with Spring Boot; tested with REST Assured.
<file_sep>/src/main/java/lottoApi/service/LottoServiceTelegraph.java
package lottoApi.service;
import lottoApi.exceptions.WinnerNotFoundException;
import lottoApi.model.LottoExtraction;
import lottoApi.exceptions.LottoNotFoundException;
import lottoApi.model.LottoWinner;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * In-memory {@link LottoService} implementation backed by hard-coded
 * sample data (three extractions, three winners).
 */
@Service("LottoServiceTelegraph")
public class LottoServiceTelegraph implements LottoService {

    private List<LottoExtraction> lottos;     // sample extractions, ids 0..2
    private List<LottoWinner> lottoWinners;   // sample winners, ids 0..2

    public LottoServiceTelegraph() {
        // Fixed demo data; each extraction's winner list references winner ids.
        lottos = Arrays.asList(
                new LottoExtraction(0L,
                        123456L,
                        new ArrayList<Integer>(Arrays.asList(1, 3, 5, 25, 46, 70)),
                        new ArrayList<Long>(Arrays.asList(0L))),
                new LottoExtraction(1L,
                        145456L,
                        new ArrayList<Integer>(Arrays.asList(1, 3, 35, 47, 59, 87)),
                        new ArrayList<Long>(Arrays.asList(1L, 2L))),
                new LottoExtraction(2L,
                        143456L,
                        new ArrayList<Integer>(Arrays.asList(1, 21, 32, 43, 56, 67)),
                        new ArrayList<Long>(Arrays.asList(0L, 2L)))
        );
        lottoWinners = Arrays.asList(
                new LottoWinner(0L, "Winner_1", new ArrayList<Integer>(Arrays.asList(1, 3, 5, 25, 46, 70))),
                new LottoWinner(1L, "Winner_2", new ArrayList<Integer>(Arrays.asList(1, 3, 5, 25, 46))),
                new LottoWinner(2L, "Winner_3", new ArrayList<Integer>(Arrays.asList(1, 3, 5, 25)))
        );
    }

    /** {@inheritDoc} Throws {@code LottoNotFoundException} if the list is null. */
    @Override
    public List<LottoExtraction> getLottos()
    {
        if (lottos == null)
            throw new LottoNotFoundException(String.format("No lotto found"));
        return lottos;
    }

    /** {@inheritDoc} Throws {@code WinnerNotFoundException} if the list is null. */
    @Override
    public List<LottoWinner> getLottoWinners() {
        if (lottoWinners == null)
            throw new WinnerNotFoundException(String.format("No winner found"));
        return lottoWinners;
    }

    /**
     * Linear scan for the extraction with the given id.
     *
     * @throws LottoNotFoundException if no extraction matches
     */
    @Override
    public LottoExtraction getLottoById(long id) {
        LottoExtraction lottoFound = null;
        for (LottoExtraction lotto : lottos)
            if (lotto.getId() == id)
                lottoFound = lotto;
        if (lottoFound == null)
            throw new LottoNotFoundException(String.format("lotto with id %d was not found", id));
        return lottoFound;
    }

    /**
     * Linear scan for the winner with the given id.
     *
     * @throws WinnerNotFoundException if no winner matches
     */
    @Override
    public LottoWinner getWinnerById(long id) {
        LottoWinner winnerFound = null;
        for (LottoWinner winner : lottoWinners)
            if (winner.getId() == id)
                winnerFound = winner;
        if (winnerFound == null)
            throw new WinnerNotFoundException(String.format("winner with id %d not found", id));
        return winnerFound;
    }
}
<file_sep>/src/main/java/lottoApi/model/LottoExtraction.java
package lottoApi.model;
import java.util.List;
/**
 * Model of a single lotto extraction (draw).
 */
public class LottoExtraction {

    private long id;                       // unique identifier of the draw
    private long date;                     // draw date as a numeric value (unit not specified here)
    private List<Integer> winningNumbers;  // the drawn numbers
    private List<Long> winners;            // ids of the winners of this draw

    public LottoExtraction(long id, long date, List<Integer> winningNumbers, List<Long> winners) {
        this.id = id;
        this.date = date;
        this.winningNumbers = winningNumbers;
        this.winners = winners;
    }

    public long getId() {
        return id;
    }

    public long getDate() {
        return date;
    }

    public List<Integer> getWinningNumbers() {
        return winningNumbers;
    }

    /**
     * Accessor for the winner ids. The field was previously stored by the
     * constructor but had no getter; adding one is backward-compatible.
     *
     * @return ids of the winners of this draw
     */
    public List<Long> getWinners() {
        return winners;
    }
}
|
54b1a0a4a6d3491202c92d8d7720fb4dc152e3e1
|
[
"Markdown",
"Java"
] | 4 |
Java
|
thiwwy/lotto_service_system
|
23284a53c456cf27a9928d7634758003adfd12ba
|
309d93845f291cc6e9e51902c00440617d6a403f
|
refs/heads/master
|
<file_sep>#
# Compact 26-char URL-safe representation of UUIDs
#
import uuid
import base64
from base32_crockford import encode, decode
import sys
def uuid_to_str26( _uuid: uuid ) -> str:
    '''
    In: uuid
    Returns: 26-char representation of uuid
    '''
    # Crockford-base32 encode the 128-bit integer, then left-pad with '0'
    # (the base32 zero digit) to the fixed width of 26 characters.
    encoded = encode( _uuid.int )
    return encoded.rjust( 26, '0' )
def str26_to_int( string: str ) -> int:
    '''
    In: string, 26char encoding of uuid
    Returns: Int of uuid
    '''
    # Thin wrapper over the Crockford base32 decoder.
    return decode( string )
def uuid_to_str22( _uuid: uuid ) -> str:
    '''
    In: uuid, 16 bytes
    Returns 22 char representation of uuid.
    '''
    # 16 bytes always base64-encode to 24 chars whose last two are the
    # padding "==", so those are simply stripped off.
    raw = _uuid.int.to_bytes( 16, byteorder=sys.byteorder )
    encoded = base64.urlsafe_b64encode( raw ).decode()
    return encoded[:-2]
def str22_to_int( string: str ) -> int:
    '''
    In: 22char encoding of uuid
    Returns Int of uuid
    '''
    # Re-append the "==" padding stripped by uuid_to_str22, decode the
    # 24-char base64 text back to 16 bytes, and rebuild the integer.
    padded = ( string + '==' ).encode( "utf-8" )
    decoded = base64.urlsafe_b64decode( padded )
    return int.from_bytes( decoded, byteorder=sys.byteorder )
<file_sep>import uuid
import unittest
from compact_uuids import (
uuid_to_str26,
str26_to_int,
uuid_to_str22,
str22_to_int,
)
class compact_uuids_test( unittest.TestCase ):
    """Round-trip tests for the 26- and 22-char uuid representations."""

    @classmethod
    def setUpClass( cls ):
        cls.uuids = []
        # generate uuids to play with... 40 of those.
        for _ in range( 40 ):
            cls.uuids.append( uuid.uuid4() )
        return

    def test_26( self ):
        '''
        Test 26-char representation
        '''
        # The digit ruler helps visually align the printed hex/encoded pairs.
        print( '\n12345678901234567890123456789012 12345678901234567890123456')
        for u1 in self.uuids:
            self.assertEqual( len( str(u1) ), 36 )
            str26 = uuid_to_str26( u1 )
            self.assertEqual( len( str26 ), 26 )
            # Round-trip: decoding must reproduce the original 128-bit int.
            int26 = str26_to_int( str26 )
            self.assertEqual( u1.int, int26 )
            print( f'{u1.hex} {str26}')
        return

    def test_22( self ):
        '''
        Test 22-char representation
        '''
        print( '\n12345678901234567890123456789012 1234567890123456789012')
        for u1 in self.uuids:
            self.assertEqual( len( str(u1) ), 36 )
            str22 = uuid_to_str22( u1 )
            self.assertEqual( len( str22 ), 22 )
            # Round-trip: decoding must reproduce the original 128-bit int.
            int22 = str22_to_int( str22 )
            self.assertEqual( u1.int, int22 )
            print( f'{u1.hex} {str22}')
        return

    def test_both( self ):
        '''
        Compare uuid representation
        '''
        # Exercise both encodings on the same uuids and print them side by side.
        print( '\n12345678901234567890123456789012 12345678901234567890123456 1234567890123456789012')
        for u1 in self.uuids:
            str26 = uuid_to_str26( u1 )
            self.assertEqual( len( str26 ), 26 )
            int26 = str26_to_int( str26 )
            self.assertEqual( u1.int, int26 )
            str22 = uuid_to_str22( u1 )
            self.assertEqual( len( str22 ), 22 )
            int22 = str22_to_int( str22 )
            self.assertEqual( u1.int, int22 )
            print( f'{u1.hex} {str26} {str22}')
        return
if __name__ == '__main__':
unittest.main()
<file_sep># Compact UUID Representation
Using base64 or base32 encoding it is possible to express 16 UUID bytes
as 22 or 26 characters:
* use base64 encoding to pack UUID into 22 chars - digits and CAPS alpha (no U,O)
* use base32 encoding to pack UUID into 26 chars - digits, alpha, _, -
Inspired by:
https://github.com/tonsky/compact-uuids
https://www.crockford.com/base32.html
base32-crockford.py is from
https://github.com/jbittel/base32-crockford/
Use at your own risk.
|
242a607e4c801c891f8cc00598a9258e16179345
|
[
"Markdown",
"Python"
] | 3 |
Python
|
asokolsky/compact-uuids-py
|
6585d09d897f156b718994c26984aaea0710aa0e
|
b3788e58235bc0e8d50565e7fa47dc98e3663c18
|
refs/heads/master
|
<file_sep>FROM duluca/minimal-node-web-server
WORKDIR /usr/src/app
COPY . public<file_sep>const app = new PIXI.Application(800, 600, {backgroundColor : 0x1099bb});
document.body.appendChild(app.view);
PIXI.loader.add('tank', 'tank.svg').load((loader, resources) => {
const tank = new PIXI.Sprite(resources.tank.texture);
// Setup the position of the tank
tank.x = app.renderer.width / 2;
tank.y = app.renderer.height / 2;
// Rotate around the center
tank.anchor.x = 0.5;
tank.anchor.y = 0.5;
// Add the tank to the scene we are building
app.stage.addChild(tank);
// Listen for frame updates
app.ticker.add(() => {
// each frame we spin the tank around a bit
tank.rotation += 0.01;
});
});
|
48b8a2bc00df99c546f04eb7b48c7545404fd231
|
[
"JavaScript",
"Dockerfile"
] | 2 |
Dockerfile
|
ewoutkleinsmann/battletank_viewer
|
7f9f56587de2f76494a3ac74c641723643e54cda
|
7d7f02d2cb8227ff9e55501d48fa9eb1cbe45500
|
refs/heads/master
|
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.6.5.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generation Time: Dec 12, 2018 at 01:22 AM
-- Server version: 5.6.35
-- PHP Version: 7.0.15
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `universityregistrar`
--
-- --------------------------------------------------------
--
-- Table structure for table `department_courses`
--
CREATE TABLE `department_courses` (
`id` int(11) NOT NULL,
`course_id` int(11) NOT NULL,
`department_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `department_courses`
--
INSERT INTO `department_courses` (`id`, `course_id`, `department_id`) VALUES
(1, 5, 1),
(2, 9, 4),
(3, 10, 4),
(4, 11, 4);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `department_courses`
--
ALTER TABLE `department_courses`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `department_courses`
--
ALTER TABLE `department_courses`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>using Microsoft.VisualStudio.TestTools.UnitTesting;
using UniversityRegistrar.Models;
namespace UniversityRegistrar.Tests
{
}
<file_sep>using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc;
using UniversityRegistrar.Models;
using System;
namespace UniversityRegistrar.Controllers
{
public class UniversityController : Controller
{
[HttpGet("/university/newstudent")]
public ActionResult Student()
{
List<StudentClass> allStudents = StudentClass.GetAll();
return View("NewStudent", allStudents);
}
[HttpPost("/university/newstudent")]
public ActionResult NewStudent(string name, string date)
{
StudentClass student = new StudentClass(name, date);
student.Save();
return RedirectToAction("Student");
}
[HttpGet("/university/newcourse")]
public ActionResult Course()
{
List<CourseClass> allCourses = CourseClass.GetAll();
return View("NewCourse", allCourses);
}
[HttpPost("/university/newcourse")]
public ActionResult NewCourse(string name, string code)
{
CourseClass course = new CourseClass(name, code);
course.Save();
return RedirectToAction("Course");
}
[HttpGet("/university/newdepartment")]
public ActionResult Department()
{
List<DepartmentClass> departments = DepartmentClass.GetAll();
return View("NewDepartment", departments);
}
[HttpPost("/university/newdepartment")]
public ActionResult NewDepartment(string name)
{
DepartmentClass department = new DepartmentClass(name);
department.Save();
return RedirectToAction("Department");
}
[HttpGet("/university/assign")]
public ActionResult Assign()
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<StudentClass> students = StudentClass.GetAll();
List<CourseClass> courses = CourseClass.GetAll();
allInfo.Add("students", students);
allInfo.Add("courses" , courses);
return View(allInfo);
}
[HttpPost("/university/assign")]
public ActionResult NewAssign(int student, int course)
{
JoinTableClass join = new JoinTableClass(student, course);
join.Save();
return RedirectToAction("Assign");
}
[HttpGet("/university/assignstudents")]
public ActionResult AssignStudents()
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<StudentClass> students = StudentClass.GetAll();
List<DepartmentClass> departments = DepartmentClass.GetAll();
allInfo.Add("students", students);
allInfo.Add("departments" , departments);
return View(allInfo);
}
[HttpPost("/university/assignstudents")]
public ActionResult NewAssignStudents(int student, int department)
{
JoinStudentDepartmentClass join = new JoinStudentDepartmentClass(student, department);
join.Save();
return RedirectToAction("AssignStudents");
}
[HttpGet("/university/assigncourses")]
public ActionResult AssignCourses()
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<CourseClass> courses = CourseClass.GetAll();
List<DepartmentClass> departments = DepartmentClass.GetAll();
allInfo.Add("courses", courses);
allInfo.Add("departments" , departments);
return View(allInfo);
}
[HttpPost("/university/assigncourses")]
public ActionResult NewAssignCourses(int course, int department)
{
JoinCourseDepartmentClass join = new JoinCourseDepartmentClass(course, department);
join.Save();
return RedirectToAction("Assigncourses");
}
[HttpGet("/university/student/{id}")]
public ActionResult ShowStudent(int id)
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<StudentClass> student = StudentClass.FindById(id);
List<CourseClass> courses = JoinTableClass.GetCoursesByStudentId(id);
List<bool> classStatus = new List<bool>() {};
foreach (CourseClass course in courses)
{
bool status = JoinTableClass.FindStatusByStudentAndCourseId(id, course.GetId());
classStatus.Add(status);
}
allInfo.Add("student", student);
allInfo.Add("courses", courses);
allInfo.Add("status", classStatus);
return View("ShowStudent", allInfo);
}
[HttpGet("/university/course/{id}")]
public ActionResult ShowCourse(int id)
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<StudentClass> students = JoinTableClass.GetStudentsByCourseId(id);
List<CourseClass> course = CourseClass.FindById(id);
allInfo.Add("students", students);
allInfo.Add("course", course);
return View("ShowCourse", allInfo);
}
[HttpGet("/university/department/{id}")]
public ActionResult ShowDepartment(int id)
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<DepartmentClass> department = DepartmentClass.FindById(id);
List<StudentClass> students = JoinStudentDepartmentClass.GetStudentsByDepartmentId(id);
List<CourseClass> courses = JoinCourseDepartmentClass.GetCoursesByDepartmentId(id);
allInfo.Add("students", students);
allInfo.Add("courses", courses);
allInfo.Add("department", department);
return View("ShowDepartment", allInfo);
}
[HttpGet("/university/updategrade/{id}")]
public ActionResult UpdateGrade(int id)
{
Dictionary<string, object> allInfo = new Dictionary<string, object>();
List<StudentClass> student = StudentClass.FindById(id);
List<CourseClass> courses = JoinTableClass.GetCoursesByStudentId(id);
List<bool> classStatus = new List<bool>() {};
foreach (CourseClass course in courses)
{
bool status = JoinTableClass.FindStatusByStudentAndCourseId(id, course.GetId());
classStatus.Add(status);
}
allInfo.Add("student", student);
allInfo.Add("courses", courses);
allInfo.Add("status", classStatus);
return View(allInfo);
}
[HttpPost("/university/updategrade/{id}")]
public ActionResult Update(int id, int course, string status)
{
if (status == "1")
{
JoinTableClass.UpdatePassing(id, course);
}
else
{
JoinTableClass.UpdateFailing(id, course);
}
return RedirectToAction("ShowStudent", id);
}
}
}
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.6.5.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generation Time: Dec 12, 2018 at 01:21 AM
-- Server version: 5.6.35
-- PHP Version: 7.0.15
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `universityregistrar`
--
-- --------------------------------------------------------
--
-- Table structure for table `courses`
--
CREATE TABLE `courses` (
`id` int(11) NOT NULL,
`name` varchar(255) NOT NULL,
`code` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `courses`
--
INSERT INTO `courses` (`id`, `name`, `code`) VALUES
(1, 'English 101', 'ENG101'),
(2, 'History of The Land', 'HIST101'),
(3, 'English: Hard Grammar', 'ENG102'),
(4, 'Archaeology for Noobs', 'ARCH101'),
(5, 'Pottery', 'ART101'),
(6, 'Play Games Outside', 'FIT101'),
(7, 'Intermediate Pottery ', 'ART201'),
(8, 'Advanced Pottery', 'ART301'),
(9, 'Beginners Web Development', 'WEB101'),
(10, 'Intermediate Web Deveopment', 'WEB 102'),
(11, 'Advanced Web Development ', 'WEB301');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `courses`
--
ALTER TABLE `courses`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `courses`
--
ALTER TABLE `courses`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.6.5.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generation Time: Dec 12, 2018 at 01:22 AM
-- Server version: 5.6.35
-- PHP Version: 7.0.15
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `universityregistrar`
--
-- --------------------------------------------------------
--
-- Table structure for table `department_students`
--
CREATE TABLE `department_students` (
`id` int(11) NOT NULL,
`student_id` int(11) NOT NULL,
`department_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `department_students`
--
INSERT INTO `department_students` (`id`, `student_id`, `department_id`) VALUES
(1, 1, 1),
(2, 3, 2),
(3, 1, 4),
(4, 8, 4),
(5, 7, 4),
(6, 6, 4),
(7, 5, 4);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `department_students`
--
ALTER TABLE `department_students`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `department_students`
--
ALTER TABLE `department_students`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>using System;
using System.Collections.Generic;
using MySql.Data.MySqlClient;
using UniversityRegistrar;
namespace UniversityRegistrar.Models
{
public class DepartmentClass
{
private int _id;
private string _name;
public DepartmentClass(string name,int id=0)
{
_id = id;
_name = name;
}
public int GetId()
{
return _id;
}
public string GetName()
{
return _name;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO departments (name) VALUES (@name);";
cmd.Parameters.AddWithValue("@name", this._name);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<DepartmentClass> GetAll()
{
List<DepartmentClass> allDepartments = new List<DepartmentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM departments;";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
DepartmentClass newDepartment = new DepartmentClass(name, id);
allDepartments.Add(newDepartment);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allDepartments;
}
public static List<DepartmentClass> FindById(int id)
{
List<DepartmentClass> currentDepartment = new List<DepartmentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
MySqlCommand cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM departments WHERE id = " + id + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int idz = rdr.GetInt32(0);
string name = rdr.GetString(1);
DepartmentClass newDepartment = new DepartmentClass(name, idz);
currentDepartment.Add(newDepartment);
}
conn.Close();
if (conn !=null)
{
conn.Dispose();
}
return currentDepartment;
}
}
public class JoinStudentDepartmentClass{
private int _id;
private int _student_id;
private int _department_id;
public JoinStudentDepartmentClass(int student_id, int department_id, int id=0)
{
_student_id = student_id;
_department_id = department_id;
_id = id;
}
public int GetId()
{
return _id;
}
public int GetStudentId()
{
return _student_id;
}
public int GetDepartmentId()
{
return _department_id;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO department_students(student_id, department_id) VALUES (@student, @department);";
cmd.Parameters.AddWithValue("@student", this._student_id);
cmd.Parameters.AddWithValue("@department", this._department_id);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<StudentClass> GetStudentsByDepartmentId(int departmentId)
{
List<StudentClass> allStudents = new List<StudentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT students.* FROM
students JOIN department_students ON (students.id = department_students.student_id)
JOIN departments ON (department_students.department_id = departments.id)
WHERE departments.id = " + departmentId + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
DateTime date = (DateTime) rdr.GetDateTime(2);
StudentClass newStudent = new StudentClass(name, date.ToString("MM/dd/yyyy"), id);
allStudents.Add(newStudent);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allStudents;
}
}
public class JoinCourseDepartmentClass
{
private int _id;
private int _course_id;
private int _department_id;
public JoinCourseDepartmentClass(int course_id, int department_id, int id=0)
{
_course_id = course_id;
_department_id = department_id;
_id = id;
}
public int GetId()
{
return _id;
}
public int GetCourseId()
{
return _course_id;
}
public int GetDepartmentId()
{
return _department_id;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO department_courses(course_id, department_id) VALUES (@course, @department);";
cmd.Parameters.AddWithValue("@course", this._course_id);
cmd.Parameters.AddWithValue("@department", this._department_id);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<CourseClass> GetCoursesByDepartmentId(int departmentId)
{
List<CourseClass> allCourses = new List<CourseClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT courses.* FROM
courses JOIN department_courses ON (courses.id = department_courses.Course_id)
JOIN departments ON (department_courses.department_id = departments.id)
WHERE departments.id = " + departmentId + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
string code = rdr.GetString(2);
CourseClass newCourse = new CourseClass(name, code, id);
allCourses.Add(newCourse);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allCourses;
}
}
public class JoinTableClass
{
private int _id;
private int _student_id;
private int _course_id;
private int _department_id;
private int _status;
public JoinTableClass(int student_id, int course_id, int id=0)
{
_student_id = student_id;
_course_id = course_id;
_id = id;
}
public JoinTableClass(int student_id, int course_id, int status, int id=0)
{
_student_id = student_id;
_course_id = course_id;
_status = status;
_id = id;
}
public int GetId()
{
return _id;
}
public int GetStudentId()
{
return _student_id;
}
public int GetCourseId()
{
return _course_id;
}
public int GetStatus()
{
return _status;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO students_courses(student_id, course_id) VALUES (@student, @course);";
cmd.Parameters.AddWithValue("@student", this._student_id);
cmd.Parameters.AddWithValue("@course", this._course_id);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<CourseClass> GetCoursesByStudentId(int studentId)
{
List<CourseClass> allCourses = new List<CourseClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT courses.* FROM
courses JOIN students_courses ON (courses.id = students_courses.course_id)
JOIN students ON (students_courses.student_id = students.id)
WHERE students.id = " + studentId + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
string code = rdr.GetString(2);
CourseClass newCourse = new CourseClass(name, code, id);
allCourses.Add(newCourse);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allCourses;
}
public static List<StudentClass> GetStudentsByCourseId(int courseId)
{
List<StudentClass> allStudents = new List<StudentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT students.* FROM
students JOIN students_courses ON (students.id = students_courses.student_id)
JOIN courses ON (students_courses.course_id = courses.id)
WHERE courses.id = " + courseId + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
DateTime date = (DateTime) rdr.GetDateTime(2);
StudentClass newStudent = new StudentClass(name, date.ToString("MM/dd/yyyy"), id);
allStudents.Add(newStudent);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allStudents;
}
public static bool FindStatusByStudentAndCourseId(int student_id, int course_id)
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT status FROM students_courses WHERE student_id = " + student_id + " AND course_id = " + course_id + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
bool status = false;
while(rdr.Read())
{
if (rdr.GetBoolean(0) == false)
{
status = false;
}
else
{
status = true;
}
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return status;
}
public static void UpdatePassing(int student_id, int course_id)
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"UPDATE students_courses SET status = true WHERE student_id = " + student_id + " AND course_id = " + course_id + ";";
cmd.ExecuteNonQuery();
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static void UpdateFailing(int student_id, int course_id)
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"UPDATE students_courses SET status = false WHERE student_id = " + student_id + " AND course_id = " + course_id + ";";
cmd.ExecuteNonQuery();
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
}
public class StudentClass
{
private int _id;
private string _name;
private string _date;
public StudentClass(string name, string date, int id=0)
{
_name = name;
_date = date;
_id = id;
}
public int GetId()
{
return _id;
}
public string GetName()
{
return _name;
}
public string GetDate()
{
return _date;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO students(name, date) VALUES (@name, @date);";
cmd.Parameters.AddWithValue("@name", this._name);
cmd.Parameters.AddWithValue("@date", this._date);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<StudentClass> GetAll()
{
List<StudentClass> allStudents = new List<StudentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM students;";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
DateTime date = (DateTime) rdr.GetDateTime(2);
StudentClass newStudent = new StudentClass(name, date.ToString("MM/dd/yyyy"), id);
allStudents.Add(newStudent);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allStudents;
}
public static List<StudentClass> FindById(int id)
{
List<StudentClass> currentStudent = new List<StudentClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
MySqlCommand cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM students WHERE id = " + id + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int idz = rdr.GetInt32(0);
string name = rdr.GetString(1);
DateTime date = (DateTime) rdr.GetDateTime(2);
StudentClass newStudent = new StudentClass(name, date.ToString("MM/dd/yyyy"), idz);
currentStudent.Add(newStudent);
}
conn.Close();
if (conn !=null)
{
conn.Dispose();
}
return currentStudent;
}
}
public class CourseClass
{
private int _id;
private string _name;
private string _code;
public CourseClass(string name, string code, int id=0)
{
_name = name;
_code = code;
_id = id;
}
public int GetId()
{
return _id;
}
public string GetName()
{
return _name;
}
public string GetCode()
{
return _code;
}
public void Save()
{
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"INSERT INTO courses(name, code) VALUES (@name, @code);";
cmd.Parameters.AddWithValue("@name", this._name);
cmd.Parameters.AddWithValue("@code", this._code);
cmd.ExecuteNonQuery();
_id = (int) cmd.LastInsertedId;
conn.Close();
if (conn != null)
{
conn.Dispose();
}
}
public static List<CourseClass> FindById(int id)
{
List<CourseClass> currentCourse = new List<CourseClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
MySqlCommand cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM courses WHERE id = " + id + ";";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int idz = rdr.GetInt32(0);
string name = rdr.GetString(1);
string code = rdr.GetString(2);
CourseClass newCourse = new CourseClass(name, code, idz);
currentCourse.Add(newCourse);
}
conn.Close();
if (conn !=null)
{
conn.Dispose();
}
return currentCourse;
}
public static List<CourseClass> GetAll()
{
List<CourseClass> allCourses = new List<CourseClass>{};
MySqlConnection conn = DB.Connection();
conn.Open();
var cmd = conn.CreateCommand() as MySqlCommand;
cmd.CommandText = @"SELECT * FROM courses;";
MySqlDataReader rdr = cmd.ExecuteReader() as MySqlDataReader;
while(rdr.Read())
{
int id = rdr.GetInt32(0);
string name = rdr.GetString(1);
string code = rdr.GetString(2);
CourseClass newCourse = new CourseClass(name, code, id);
allCourses.Add(newCourse);
}
conn.Close();
if (conn != null)
{
conn.Dispose();
}
return allCourses;
}
}
}
|
a048ea1cce4f4dee8f790450b0499af747008338
|
[
"C#",
"SQL"
] | 6 |
SQL
|
manasavesala/UniversityRegistrar.Solution
|
03d5ecbd3d36bb328a00f791b0415deae68602f3
|
8e06f055fe0dea6adf8f6d47b7d1c70848411fce
|
refs/heads/master
|
<repo_name>ErrEoE/phpactor<file_sep>/lib/Application/FileInfoAtOffset.php
<?php
namespace Phpactor\Application;
use DTL\TypeInference\TypeInference;
use DTL\TypeInference\Domain\Offset;
use DTL\TypeInference\Domain\SourceCode;
use DTL\Filesystem\Domain\Filesystem;
use DTL\ClassFileConverter\Domain\FilePath;
use DTL\ClassFileConverter\Domain\ClassName;
use DTL\ClassFileConverter\Domain\ClassToFile;
use DTL\TypeInference\Domain\InferredType;
use DTL\TypeInference\Domain\TypeInferer;
use DTL\ClassFileConverter\Domain\FileToClass;
use DTL\ClassFileConverter\Domain\ClassToFileFileToClass;
final class FileInfoAtOffset
{
/**
* @var TypeInference
*/
private $inference;
/**
* @var ClassToFileFileToClass
*/
private $classToFileConverter;
/**
* @var Filesystem
*/
private $filesystem;
public function __construct(
TypeInferer $inference,
ClassToFileFileToClass $classToFileConverter,
Filesystem $filesystem
)
{
$this->inference = $inference;
$this->classToFileConverter = $classToFileConverter;
$this->filesystem = $filesystem;
}
public function infoForFile(string $sourcePath)
{
$path = $this->filesystem->createPath($sourcePath);
$classCandidates = $this->classToFileConverter->fileToClassCandidates(FilePath::fromString((string) $path));
$return = [
'class' => null,
'class_name' => null,
'class_namespace' => null,
];
if ($classCandidates->noneFound()) {
return $return;
}
$best = $classCandidates->best();
return [
'class' => (string) $best,
'class_name' => $best->name(),
'class_namespace' => $best->namespace(),
];
}
public function infoForOffset(string $sourcePath, int $offset, $showFrame = false): array
{
$path = $this->filesystem->createPath($sourcePath);
$result = $this->inference->inferTypeAtOffset(
SourceCode::fromString(
$this->filesystem->getContents($path)
),
Offset::fromInt($offset)
);
$return = [
'type' => (string) $result->type(),
'path' => null,
'messages' => $result->log()->messages()
];
if ($showFrame) {
$return['frame'] = $result->frame()->asDebugMap();
}
if (InferredType::unknown() == $result->type()) {
return $return;
}
$fileCandidates = $this->classToFileConverter->classToFileCandidates(ClassName::fromString((string) $result->type()));
foreach ($fileCandidates as $candidate) {
if (file_exists((string) $candidate)) {
$return['path'] = (string) $candidate;
}
}
return $return;
}
}
<file_sep>/lib/Container/CoreExtension.php
<?php
namespace Phpactor\Container;
use Composer\Autoload\ClassLoader;
use PhpBench\DependencyInjection\Container;
use PhpBench\DependencyInjection\ExtensionInterface;
use Symfony\Component\Console\Application;
use Phpactor\UserInterface\Console\Command\ClassMoveCommand;
use Phpactor\Application\ClassMover as ClassMoverApp;
use DTL\Filesystem\Adapter\Git\GitFilesystem;
use DTL\Filesystem\Domain\Cwd;
use DTL\ClassMover\ClassMover;
use DTL\Filesystem\Adapter\Simple\SimpleFilesystem;
use Phpactor\Application\FileInfo;
use DTL\TypeInference\TypeInference;
use Phpactor\UserInterface\Console\Command\FileInfoAtOffsetCommand;
use Phpactor\Application\ClassSearch;
use Phpactor\UserInterface\Console\Command\ClassSearchCommand;
use DTL\Filesystem\Adapter\Composer\ComposerFilesystem;
use DTL\Filesystem\Domain\FilePath;
use Phpactor\UserInterface\Console\Command\FileInfoCommand;
use DTL\ClassFileConverter\Domain\ClassToFileFileToClass;
use DTL\ClassFileConverter\Adapter\Composer\ComposerClassToFile;
use DTL\ClassFileConverter\Adapter\Composer\ComposerFileToClass;
use DTL\ClassFileConverter\Domain\ChainFileToClass;
use DTL\ClassFileConverter\Domain\ChainClassToFile;
use DTL\Filesystem\Adapter\Composer\ComposerFileListProvider;
use DTL\Filesystem\Domain\ChainFileListProvider;
use Phpactor\UserInterface\Console\Prompt\ChainPrompt;
use Phpactor\UserInterface\Console\Prompt\BashPrompt;
use DTL\TypeInference\Adapter\ClassToFile\ClassToFileSourceCodeLoader;
use DTL\TypeInference\Adapter\TolerantParser\TolerantTypeInferer;
use DTL\TypeInference\Adapter\WorseReflection\WorseSourceCodeLocator;
use DTL\TypeInference\Adapter\WorseReflection\WorseMemberTypeResolver;
use DTL\WorseReflection\Reflector;
use Phpactor\UserInterface\Console\Command\ClassCopyCommand;
use Phpactor\Application\ClassCopy;
use Phpactor\Application\FileInfoAtOffset;
class CoreExtension implements ExtensionInterface
{
    const APP_NAME = 'phpactor';
    const APP_VERSION = '0.2.0';

    static $autoloader;

    /**
     * Default container parameters. These can be overridden by the
     * phpactor.yml configuration files merged at startup.
     */
    public function getDefaultConfig()
    {
        return [
            'autoload' => 'vendor/autoload.php',
            'cwd' => getcwd(),
        ];
    }

    /**
     * Register every service group into the dependency-injection container.
     */
    public function load(Container $container)
    {
        $this->registerConsole($container);
        $this->registerComposer($container);
        $this->registerClassToFile($container);
        $this->registerClassMover($container);
        $this->registerTypeInference($container);
        $this->registerSourceCodeFilesystem($container);
        $this->registerApplicationServices($container);
    }

    /**
     * Register console commands (tagged "ui.console.command") and the
     * interactive prompt implementation.
     */
    private function registerConsole(Container $container)
    {
        $container->register('command.class_move', function (Container $container) {
            return new ClassMoveCommand(
                $container->get('application.class_mover'),
                $container->get('console.prompter')
            );
        }, [ 'ui.console.command' => []]);

        $container->register('command.class_copy', function (Container $container) {
            return new ClassCopyCommand(
                $container->get('application.class_copy'),
                $container->get('console.prompter')
            );
        }, [ 'ui.console.command' => []]);

        $container->register('command.class_search', function (Container $container) {
            return new ClassSearchCommand(
                $container->get('application.class_search')
            );
        }, [ 'ui.console.command' => []]);

        $container->register('command.file_offset', function (Container $container) {
            return new FileInfoAtOffsetCommand(
                $container->get('application.file_info_at_offset')
            );
        }, [ 'ui.console.command' => []]);

        $container->register('command.file_info', function (Container $container) {
            return new FileInfoCommand(
                $container->get('application.file_info')
            );
        }, [ 'ui.console.command' => []]);

        $container->register('console.prompter', function (Container $container) {
            // Currently only a bash-based prompt is available.
            return new ChainPrompt([
                new BashPrompt()
            ]);
        });
    }

    /**
     * Register the Composer class loaders configured via the "autoload"
     * parameter. Each configured path must return a ClassLoader instance.
     */
    private function registerComposer(Container $container)
    {
        $container->register('composer.class_loaders', function (Container $container) {
            $autoloaderPaths = (array) $container->getParameter('autoload');
            $autoloaders = [];

            foreach ($autoloaderPaths as $autoloaderPath) {
                if (!file_exists($autoloaderPath)) {
                    throw new \InvalidArgumentException(sprintf(
                        'Could not locate autoloaderPath file "%s"', $autoloaderPath
                    ));
                }

                // Composer autoload files return the ClassLoader when required.
                $autoloader = require $autoloaderPath;

                if (!$autoloader instanceof ClassLoader) {
                    throw new \RuntimeException('Autoloader is not an instance of ClassLoader');
                }

                $autoloaders[] = $autoloader;
            }

            return $autoloaders;
        });
    }

    /**
     * Register the class-name <-> file-path converters, chained over every
     * registered Composer class loader.
     */
    private function registerClassToFile(Container $container)
    {
        $container->register('class_to_file.converter', function (Container $container) {
            return new ClassToFileFileToClass(
                $container->get('class_to_file.class_to_file'),
                $container->get('class_to_file.file_to_class')
            );
        });

        $container->register('class_to_file.class_to_file', function (Container $container) {
            $classToFiles = [];
            foreach ($container->get('composer.class_loaders') as $classLoader) {
                $classToFiles[] = new ComposerClassToFile($classLoader);
            }

            return new ChainClassToFile($classToFiles);
        });

        $container->register('class_to_file.file_to_class', function (Container $container) {
            $fileToClasses = [];
            foreach ($container->get('composer.class_loaders') as $classLoader) {
                $fileToClasses[] = new ComposerFileToClass($classLoader);
            }

            return new ChainFileToClass($fileToClasses);
        });
    }

    /**
     * Register the class mover service (finds and rewrites class references).
     */
    private function registerClassMover(Container $container)
    {
        $container->register('class_mover.class_mover', function (Container $container) {
            return new ClassMover();
        });
    }

    /**
     * Register the source-code filesystem implementations: git-tracked files,
     * a plain filesystem, and one scoped to Composer-managed paths.
     */
    private function registerSourceCodeFilesystem(Container $container)
    {
        $container->register('source_code_filesystem.git', function (Container $container) {
            return new GitFilesystem(FilePath::fromString($container->getParameter('cwd')));
        });

        $container->register('source_code_filesystem.simple', function (Container $container) {
            return new SimpleFilesystem(FilePath::fromString($container->getParameter('cwd')));
        });

        $container->register('source_code_filesystem.composer', function (Container $container) {
            $providers = [];
            $cwd = FilePath::fromString($container->getParameter('cwd'));
            foreach ($container->get('composer.class_loaders') as $classLoader) {
                $providers[] = new ComposerFileListProvider($cwd, $classLoader);
            }

            return new SimpleFilesystem($cwd, new ChainFileListProvider($providers));
        });
    }

    /**
     * Register the type-inference services (tolerant parser backed by
     * WorseReflection for member type resolution).
     */
    private function registerTypeInference(Container $container)
    {
        $container->register('type_inference.source_code_loader', function (Container $container) {
            return new ClassToFileSourceCodeLoader($container->get('class_to_file.converter'));
        });

        $container->register('type_inference.type_inference', function (Container $container) {
            return new TolerantTypeInferer(null, new WorseMemberTypeResolver(
                new Reflector(
                    new WorseSourceCodeLocator(
                        $container->get('type_inference.source_code_loader'))
                )
            )
            );
        });
    }

    /**
     * Register the application-facade services consumed by the console
     * commands.
     */
    private function registerApplicationServices(Container $container)
    {
        $container->register('application.class_mover', function (Container $container) {
            return new ClassMoverApp(
                $container->get('class_to_file.converter'),
                $container->get('class_mover.class_mover'),
                $container->get('source_code_filesystem.git')
            );
        });

        $container->register('application.class_copy', function (Container $container) {
            return new ClassCopy(
                $container->get('class_to_file.converter'),
                $container->get('class_mover.class_mover'),
                $container->get('source_code_filesystem.git')
            );
        });

        $container->register('application.file_info', function (Container $container) {
            return new FileInfo(
                $container->get('type_inference.type_inference'),
                $container->get('class_to_file.converter'),
                $container->get('source_code_filesystem.simple')
            );
        });

        $container->register('application.file_info_at_offset', function (Container $container) {
            return new FileInfoAtOffset(
                $container->get('type_inference.type_inference'),
                $container->get('class_to_file.converter'),
                $container->get('source_code_filesystem.simple')
            );
        });

        $container->register('application.class_search', function (Container $container) {
            return new ClassSearch(
                $container->get('source_code_filesystem.composer'),
                $container->get('class_to_file.converter')
            );
        });
    }
}
<file_sep>/lib/Application/FileInfo.php
<?php
namespace Phpactor\Application;
use DTL\TypeInference\TypeInference;
use DTL\TypeInference\Domain\Offset;
use DTL\TypeInference\Domain\SourceCode;
use DTL\Filesystem\Domain\Filesystem;
use DTL\ClassFileConverter\Domain\FilePath;
use DTL\ClassFileConverter\Domain\ClassName;
use DTL\ClassFileConverter\Domain\ClassToFile;
use DTL\TypeInference\Domain\InferredType;
use DTL\TypeInference\Domain\TypeInferer;
use DTL\ClassFileConverter\Domain\FileToClass;
final class FileInfo
{
    /**
     * @var TypeInferer
     */
    private $inference;

    /**
     * @var FileToClass
     */
    private $classToFileConverter;

    /**
     * @var Filesystem
     */
    private $filesystem;

    public function __construct(
        TypeInferer $inference,
        FileToClass $classToFileConverter,
        Filesystem $filesystem
    )
    {
        $this->inference = $inference;
        $this->classToFileConverter = $classToFileConverter;
        $this->filesystem = $filesystem;
    }

    /**
     * Return class information for the file at the given path.
     *
     * The returned array always has the keys "class", "class_name" and
     * "class_namespace"; all values are null when no class candidate could
     * be resolved for the file.
     */
    public function infoForFile(string $sourcePath)
    {
        $path = $this->filesystem->createPath($sourcePath);
        $classCandidates = $this->classToFileConverter->fileToClassCandidates(FilePath::fromString((string) $path));

        $return = [
            'class' => null,
            'class_name' => null,
            'class_namespace' => null,
        ];

        if ($classCandidates->noneFound()) {
            return $return;
        }

        $best = $classCandidates->best();

        return [
            'class' => (string) $best,
            'class_name' => $best->name(),
            'class_namespace' => $best->namespace(),
        ];
    }
}
<file_sep>/lib/UserInterface/Console/Command/FileInfoCommand.php
<?php
namespace Phpactor\UserInterface\Console\Command;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Phpactor\Application\ClassInformationForOffsetr\ClassInformationForOffsetr;
use Symfony\Component\Console\Input\InputArgument;
use Phpactor\Phpactor;
use Phpactor\UserInterface\Console\Logger\SymfonyConsoleInformationForOffsetLogger;
use Symfony\Component\Console\Input\InputOption;
use Phpactor\Application\FileInfo;
class FileInfoCommand extends Command
{
    /**
     * Application service answering "which class lives in this file?".
     *
     * @var FileInfo
     */
    private $fileInfo;

    public function __construct(
        FileInfo $infoForOffset
    ) {
        parent::__construct();
        $this->fileInfo = $infoForOffset;
    }

    public function configure()
    {
        $this->setName('file:info');
        $this->setDescription('Return information about given file');
        $this->addArgument('path', InputArgument::REQUIRED, 'Source path or FQN');
        Handler\FormatHandler::configure($this);
    }

    public function execute(InputInterface $input, OutputInterface $output)
    {
        $info = $this->fileInfo->infoForFile(
            $input->getArgument('path')
        );

        $format = $input->getOption('format');

        // Render according to the requested output format.
        if (Handler\FormatHandler::FORMAT_JSON === $format) {
            $output->write(json_encode($info));
            return;
        }

        if (Handler\FormatHandler::FORMAT_CONSOLE === $format) {
            return $this->outputConsole($output, $info);
        }

        throw new \InvalidArgumentException(sprintf(
            'Invalid format "%s", known formats: "%s"',
            $format, implode('", "', Handler\FormatHandler::VALID_FORMATS)
        ));
    }

    /**
     * Print each key/value pair on its own line for human consumption.
     */
    private function outputConsole(OutputInterface $output, array $info)
    {
        foreach ($info as $key => $value) {
            $output->writeln(sprintf(
                '<comment>%s</comment>:%s', $key, $value
            ));
        }
    }
}
<file_sep>/plugin/README.md
Phpactor VIM Plugin
===================
Installation
------------
Using Vundle:
```
Plugin 'dantleech/phpactor'
```
and then you will need to install the Composer dependencies:
```bash
$ cd ~/.vim/bundles/phpactor
$ composer install
```
Example key mappings
--------------------
```
" Include use statement
nnoremap <silent><leader>u :call PhactUseAdd()<CR>
nnoremap <silent><Leader>o :call PhactGotoDefinition()<CR>
" Show information about "type" under cursor including current frame
nnoremap <silent><Leader>i :call PhactOffsetTypeInfo()<CR>
```
Include use statement
---------------------
Will attempt to include the use statement for the word (class name) under the
cursor:
```
: call PhactUseAdd()
```
Goto Definition
---------------
Will attempt to go to the definition of the word (class name) under the
cursor:
```
: call PhactGotoDefinition()
```
<file_sep>/README.md
Phpactor
========
[](https://travis-ci.org/dantleech/phpactor)
This project aims to provide heavy-lifting *refactoring* and *introspection*
tools which can be used with editors such as VIM.
The tool is currently limited to working with projects using **GIT** and **Composer**.
It is currently under development.
VIM Plugin
----------
This application is standalone, but it does include a plugin for VIM; see the
[plugin README](https://github.com/dantleech/phpactor/tree/master/plugin/README.md).
Configuration
-------------
Configuration files are merged from the XDG open base dir standard, and from
the current working directory, for example:
```
/etc/xdg/phpactor/phpactor.yml
/home/daniel/.config/phpactor/phpactor.yml
<current directory>/.phpactor.yml
```
All configuration options are defined in the
`Phpactor\Container\CoreExtension` class.
Commands
--------
### Move classes
All of the examples below will move the class and update all references in the
source code to it.
Move the single class from one path to another:
```bash
$ phpactor class:move lib/Path/To/MyClass.php lib/NewLocation.php
```
Relocate all classes under `Acme` to `Foobar`:
```bash
$ phpactor class:move lib/Acme lib/Foobar
```
Relocate all classes in the `lib` directory to a new subfolder:
```bash
$ phpactor class:move lib/* lib/Core
```
Move a class by name:
```bash
$ phpactor class:move "Acme\\BlogPost" "Acme\\Article"
```

- Moves individual class *files* or *directories*.
- Move by fully qualified class name or file path.
- Updates references for all moved classes in the current **GIT tree**.
- Use statements are updated or added when required.
### Copy classes
As with move, except only update the class names of the copied class(es).
```bash
$ phpactor class:copy lib/Path/To/MyClass.php lib/Path/To/CopyOfMyClass.php
$ cat lib/Path/To/CopyOfMyClass.php | grep class
class CopyOfMyClass
```
### Class Search
Search for a class by its (short) name and return a list of fully qualified
names => absolute paths.
```bash
./bin/phpactor class:search Filesystem
DTL\Filesystem\Domain\Filesystem:/.../vendor/dantleech/source-code-filesystem/lib/Domain/Filesystem.php
Symfony\Component\Filesystem\Filesystem:/.../vendor/symfony/filesystem/Filesystem.php
```
Also returns JSON with `--format=json`
### Information at offset
Return the fully qualified name of the class at the offset in the given file:
```php
$ phpactor file:offset lib/Application/InformationForOffset/InformationForOffset.php 1382
type:DTL\ClassFileConverter\ClassName
path:/.../vendor/dtl/class-to-file/lib/ClassName.php
```
Also returns JSON with `--format=json`
Child Libraries
---------------
Phpactor packages its functionality as separate, decoupled libraries:
- [dantleech/class-to-file](https://github.com/dantleech/class-to-file): Convert files to class names and vice-versa.
- [dantleech/class-mover](https://github.com/dantleech/class-mover): Find and update class references.
- [dantleech/source-code-filesystem](https://github.com/dantleech/source-code-filesystem): Find and manage source code files.
- [dantleech/type-inference](https://github.com/dantleech/type-inference): Determine type of thing at a given offset (very limited actually).
- [dantleech/worse-reflection](https://github.com/dantleech/worse-reflection): Lightweight class reflection API
About this project
------------------
Previous attempts at writing this tool involved building a single
"intellisense" core which is a database containing meta information about all
the code in the project. Whilst this approach would provide a solid core, it
is also very ambitious and offers little immediate payback (and continuing
instability) to justify the investment in time required.
The approach taken this time around is to design libraries which do *one thing
and do one thing well*.
Some of these libraries (e.g. class mover) may later be superseded by a
"reference replacer" which handles, e.g., refactoring method names. But as the
prerequisite for this is writing a type inference system, it may be a long
way off, whereas replacing class references is comparatively easy and provides
a large value return in a short amount of time.
Other libraries are more generic (e.g. class to file, source code filesystem)
and can be used by other future libraries (e.g. go-to-definition,
use-statement include, snippet generation, interface implementation,
intellisense database...).
None of the child libraries is currently intended to be consumed
independently, and they cannot be found on Packagist.
<file_sep>/lib/UserInterface/Console/Command/FileInfoAtOffsetCommand.php
<?php
namespace Phpactor\UserInterface\Console\Command;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Phpactor\Application\ClassInformationForOffsetr\ClassInformationForOffsetr;
use Symfony\Component\Console\Input\InputArgument;
use Phpactor\Phpactor;
use Phpactor\UserInterface\Console\Logger\SymfonyConsoleInformationForOffsetLogger;
use Symfony\Component\Console\Input\InputOption;
use Phpactor\Application\FileInfoAtOffset;
class FileInfoAtOffsetCommand extends Command
{
    const PADDING = '  ';

    /**
     * @var FileInfoAtOffset
     */
    private $infoForOffset;

    public function __construct(
        FileInfoAtOffset $infoForOffset
    ) {
        parent::__construct();
        $this->infoForOffset = $infoForOffset;
    }

    public function configure()
    {
        $this->setName('file:offset');
        $this->setDescription('Return information about given file at the given offset');
        $this->addArgument('path', InputArgument::REQUIRED, 'Source path or FQN');
        $this->addArgument('offset', InputArgument::REQUIRED, 'Destination path or FQN');
        $this->addOption('frame', null, InputOption::VALUE_NONE, 'Show inferred frame state at offset');
        Handler\FormatHandler::configure($this);
    }

    public function execute(InputInterface $input, OutputInterface $output)
    {
        $info = $this->infoForOffset->infoForOffset(
            $input->getArgument('path'),
            $input->getArgument('offset'),
            $input->getOption('frame')
        );

        $format = $input->getOption('format');

        // Render according to the requested output format.
        if (Handler\FormatHandler::FORMAT_JSON === $format) {
            $output->write(json_encode($info));
            return;
        }

        if (Handler\FormatHandler::FORMAT_CONSOLE === $format) {
            return $this->outputConsole($output, $info);
        }

        throw new \InvalidArgumentException(sprintf(
            'Invalid format "%s", known formats: "%s"',
            $format, implode('", "', Handler\FormatHandler::VALID_FORMATS)
        ));
    }

    /**
     * Recursively print the info array; nested arrays are indented one
     * PADDING level deeper. Depth 1 is styled "info", all other depths
     * "comment".
     */
    private function outputConsole(OutputInterface $output, array $info, int $padding = 0)
    {
        $style = 1 === $padding ? 'info' : 'comment';
        $indent = str_repeat(self::PADDING, $padding);

        foreach ($info as $key => $value) {
            if (is_array($value)) {
                $output->writeln(sprintf('%s<%s>%s</>:', $indent, $style, $key));
                $this->outputConsole($output, $value, $padding + 1);
                continue;
            }

            $output->writeln(sprintf(
                '%s<%s>%s</>:%s', $indent, $style, $key, $value
            ));
        }
    }
}
<file_sep>/lib/Application/ClassCopy.php
<?php
namespace Phpactor\Application;
use DTL\ClassFileConverter\Domain\ClassName;
use DTL\ClassFileConverter\Domain\ClassToFileFileToClass;
use DTL\ClassFileConverter\Domain\FilePath as ConverterFilePath;
use DTL\ClassMover\ClassMover as ClassMoverFacade;
use DTL\ClassMover\Domain\FullyQualifiedName;
use DTL\Filesystem\Domain\FilePath;
use DTL\Filesystem\Domain\Filesystem;
use Phpactor\Application\ClassCopy\MoveOperation;
use Phpactor\Phpactor;
use Webmozart\Glob\Glob;
use Webmozart\PathUtil\Path;
use Phpactor\Application\Logger\ClassCopyLogger;
use DTL\Filesystem\Domain\CopyReport;
class ClassCopy
{
    /**
     * @var ClassToFileFileToClass
     */
    private $fileClassConverter;

    /**
     * @var ClassMoverFacade
     */
    private $classMover;

    /**
     * @var Filesystem
     */
    private $filesystem;

    // TODO: rename fileClassConverter => classToFileConverter for consistency
    public function __construct(
        ClassToFileFileToClass $fileClassConverter,
        ClassMoverFacade $classMover,
        Filesystem $filesystem
    ) {
        $this->fileClassConverter = $fileClassConverter;
        $this->filesystem = $filesystem;
        $this->classMover = $classMover;
    }

    /**
     * Copy - guess if copying by class name or file path.
     *
     * Arguments which are not existing files are resolved through the
     * class-to-file converter before delegating to copyFile().
     */
    public function copy(ClassCopyLogger $logger, string $src, string $dest)
    {
        $srcPath = $src;
        $destPath = $dest;

        if (false === Phpactor::isFile($src)) {
            $srcPathCandidates = $this->fileClassConverter->classToFileCandidates(ClassName::fromString($src));
            if (false === $srcPathCandidates->noneFound()) {
                $srcPath = (string) $srcPathCandidates->best();
            }
        }

        if (false === Phpactor::isFile($dest)) {
            $destPathCandidates = $this->fileClassConverter->classToFileCandidates(ClassName::fromString($dest));
            if (false === $destPathCandidates->noneFound()) {
                $destPath = (string) $destPathCandidates->best();
            }
        }

        return $this->copyFile($logger, $srcPath, $destPath);
    }

    /**
     * Copy a class identified by its fully qualified name.
     */
    public function copyClass(ClassCopyLogger $logger, string $srcName, string $destName)
    {
        return $this->copyFile(
            $logger,
            (string) $this->fileClassConverter->classToFileCandidates(ClassName::fromString($srcName))->best(),
            (string) $this->fileClassConverter->classToFileCandidates(ClassName::fromString($destName))->best()
        );
    }

    /**
     * Copy one or more files (glob patterns are supported) to the
     * destination path, updating class references in each copy.
     *
     * @throws \RuntimeException When any individual copy fails.
     */
    public function copyFile(ClassCopyLogger $logger, string $srcPath, string $destPath)
    {
        $srcPath = Phpactor::normalizePath($srcPath);
        foreach (Glob::glob($srcPath) as $globPath) {
            $globDest = $destPath;
            // if the src is not the same as the globbed src, then it is a wildcard
            // and we want to append the filename to the destination
            if ($srcPath !== $globPath) {
                $globDest = Path::join($destPath, Path::getFilename($globPath));
            }

            try {
                $this->doCopyFile($logger, $globPath, $globDest);
            } catch (\Exception $e) {
                // Exception code must be an int (passing null is deprecated
                // as of PHP 8.1); use the default code 0 and chain $e.
                throw new \RuntimeException(sprintf('Could not copy file "%s" to "%s"', $srcPath, $destPath), 0, $e);
            }
        }
    }

    /**
     * Copy a single file and rewrite class references in the copy.
     */
    private function doCopyFile(ClassCopyLogger $logger, string $srcPath, string $destPath)
    {
        $destPath = Phpactor::normalizePath($destPath);

        $srcPath = $this->filesystem->createPath($srcPath);
        $destPath = $this->filesystem->createPath($destPath);

        $report = $this->filesystem->copy($srcPath, $destPath);
        $this->updateReferences($logger, $report);
        $logger->copying($srcPath, $destPath);
    }

    /**
     * For each copied file, replace references to the source class with
     * the class name derived from the destination path.
     */
    private function updateReferences(ClassCopyLogger $logger, CopyReport $copyReport)
    {
        foreach ($copyReport->srcFiles() as $srcPath) {
            $destPath = $copyReport->destFiles()->current();

            $srcClassName = $this->fileClassConverter->fileToClassCandidates(
                ConverterFilePath::fromString($srcPath->path())
            )->best();
            $destClassName = $this->fileClassConverter->fileToClassCandidates(
                ConverterFilePath::fromString($destPath->path())
            )->best();

            $references = $this->classMover->findReferences($this->filesystem->getContents($srcPath), $srcClassName);
            $logger->replacing($destPath, $references, FullyQualifiedName::fromString($destClassName));

            $source = $this->classMover->replaceReferences(
                $references,
                $destClassName
            );

            $this->filesystem->writeContents($destPath, (string) $source);
            $copyReport->destFiles()->next();
        }
    }
}
|
49403e7fd22cfb51e0f7ead17d612fc758425704
|
[
"Markdown",
"PHP"
] | 8 |
PHP
|
ErrEoE/phpactor
|
c8ae5d61b71ac67b7ce1f4d7424e326da81f88bd
|
644a03930ba9438281d7c24b30b72cc839194bd8
|
refs/heads/master
|
<repo_name>Bearded-Baguette/Tetris<file_sep>/Tetris/Board.cs
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Tetris
{
public class Board
{
    // 12x23 grid of panels; columns 0/11 and rows 0/22 form the border.
    Panel[,] boardArray = new Panel[12, 23];
    // 4x4 preview grid used to display the next piece.
    Panel[,] demoBoard = new Panel[4, 4];
    // Width/height of a single panel in pixels.
    int panelWH = 20;

    // Creates the actual game board: one panel per grid cell, laid out
    // starting at (20, 20) on the form.
    public void createBoard(Form TetrisForm)
    {
        for (int i = 0; i < 12; i++)
        {
            for (int k = 0; k < 23; k++)
            {
                Point pntCurrent = new Point(20 + (i * panelWH), 20 + (k * panelWH));
                createBlock(TetrisForm, i, k, pntCurrent);
            }
        }
    }

    // Creates the border for the game: paints the outer cells black and
    // registers them as fixed blocks so pieces collide with them, then
    // seeds the next-piece queue.
    public void setBorder()
    {
        for (int i = 0; i < 12; i++)
        {
            for (int k = 0; k < 23; k++)
            {
                if (i == 0 || i == 11 || k == 22 || k == 0)
                {
                    boardArray[i, k].BackColor = Color.Black;
                    fixedBlocks.Add(boardArray[i, k]);
                }
            }
        }
        placePieceInList();
    }

    // Create each panel of the gameboard and add it to the form.
    private void createBlock(Form TetrisForm, int i, int k, Point pntCurrent)
    {
        boardArray[i, k] = new Panel();
        boardArray[i, k].Enabled = true;
        boardArray[i, k].Visible = true;
        boardArray[i, k].BackColor = Color.Gray; // gray == empty cell
        boardArray[i, k].Size = new System.Drawing.Size(panelWH, panelWH);
        boardArray[i, k].Location = pntCurrent;
        boardArray[i, k].BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
        TetrisForm.Controls.Add(boardArray[i, k]);
    }

    // Builds the 4x4 "next piece" preview board at a fixed screen position.
    public void createNextPieceBoard(Form TetrisForm)
    {
        for (int i = 0; i < 4; i++)
        {
            for (int k = 0; k < 4; k++)
            {
                Point newPoint = new Point(354 + (i * panelWH), 42 + (k * panelWH));
                createDemoBlock(TetrisForm, i, k, newPoint);
            }
        }
    }

    // Creates a panel for the demo (preview) board.
    private void createDemoBlock(Form TetrisForm, int i, int k, Point pntCurrent)
    {
        demoBoard[i, k] = new Panel();
        demoBoard[i, k].Enabled = true;
        demoBoard[i, k].Visible = true;
        demoBoard[i, k].BackColor = Color.White;
        demoBoard[i, k].Size = new System.Drawing.Size(panelWH, panelWH);
        demoBoard[i, k].Location = pntCurrent;
        TetrisForm.Controls.Add(demoBoard[i, k]);
    }

    // Anchor position of the active piece on the board grid.
    private Point activePoint;
    // Index (0-6) of the tetromino currently falling.
    private int currentPiece;
    // Index into the active piece's rotation list.
    private int rotation;
    private int totalScore = 0;
    Tetromino tetromino;
    List<List<Point>> rotations;
    // Panels that can no longer move: the border plus all landed pieces.
    List<Panel> fixedBlocks = new List<Panel>();
    Color pieceColor;
    // Upcoming piece indices; refilled with random values when nearly empty.
    private Queue<int> nextPieces = new Queue<int>();

    // places random pieces in the list of next pieces to be dropped,
    // resets the spawn position/rotation, and dequeues the current piece.
    public void placePieceInList()
    {
        Random rnd = new Random();
        activePoint = new Point(5, 1);
        rotation = 0;
        if (nextPieces.Count == 0 || nextPieces.Count == 1)
        {
            for (int i = 0; i <= 5; i++)
            {
                // % 7 maps the random value onto the seven piece types.
                nextPieces.Enqueue(rnd.Next(1, 100) % 7);
            }
        }
        currentPiece = nextPieces.Dequeue();
    }

    // creates the next piece on the board at the spawn position.
    // NOTE(review): the gameOver() return value is ignored here; it only
    // matters because the ticker thread polls gameOver() separately — confirm.
    public void createInitialPiece()
    {
        tetromino = new Tetromino(currentPiece);
        rotations = tetromino.getRotations();
        pieceColor = tetromino.getColor();
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + activePoint.X, p.Y + activePoint.Y].BackColor = pieceColor;
            if (fixedBlocks.Contains(boardArray[p.X + activePoint.X, p.Y + activePoint.Y]))
            {
                gameOver();
            }
        }
        showNextPiece();
    }

    // Draws the upcoming piece in the demoBoard preview after clearing it.
    public void showNextPiece()
    {
        Tetromino nextTetromino = new Tetromino(nextPieces.Peek());
        List<List<Point>> nextRotations = nextTetromino.getRotations();
        Color nextPieceColor = nextTetromino.getColor();
        for (int i = 0; i < 4; i++)
        {
            for (int j = 0; j < 4; j++)
            {
                demoBoard[j, i].BackColor = Color.White;
                demoBoard[j, i].BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
                //demoBoard[j, i].BorderStyle = System.Windows.Forms.BorderStyle.None;
            }
        }
        foreach (Point p in nextRotations[0])
        {
            demoBoard[p.X, p.Y].BackColor = nextPieceColor;
            demoBoard[p.X, p.Y].BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
        }
    }

    // Returns true if placing the piece (given rotation) at (x, y) would
    // overlap a fixed block or fall below the bottom row (y index 21).
    public Boolean collisionAt(int x, int y, int rotation)
    {
        foreach (Point p in rotations[rotation])
        {
            if ((p.Y + y) > 21)
            {
                return true;
            }
            if (fixedBlocks.Contains(boardArray[p.X + x, p.Y + y]))
            {
                return true;
            }
        }
        return false;
    }

    // Moves the active piece left (i == -1) or right (i == 1) by one space
    // if the target cells are free.
    public void moveLeftOrRight(int i)
    {
        if (!collisionAt(activePoint.X + i, activePoint.Y, rotation))
        {
            activePoint.X += i;
            if (i == 1)
                repaintRightMovement(activePoint.X, activePoint.Y, rotation);
            if (i == -1)
                repaintLeftMovement(activePoint.X, activePoint.Y, rotation);
        }
    }

    // Moves the active piece down one space, or locks it in place when it
    // cannot fall any further.
    public void dropDownOneSpace()
    {
        if (!collisionAt(activePoint.X, activePoint.Y + 1, rotation))
        {
            activePoint.Y += 1;
            repaintDroppingPiece(activePoint.X, activePoint.Y, rotation);
        }
        else
        {
            fixPiece();
        }
    }

    // Rotates the active piece to its next rotation state if that state
    // does not collide; repaints old cells gray and new cells in the
    // piece color.
    public void rotatePiece()
    {
        int nextRotation = rotation + 1;
        if (nextRotation == rotations.Count)
        {
            nextRotation = 0;
        }
        if (!collisionAt(activePoint.X, activePoint.Y, nextRotation))
        {
            foreach (Point p in rotations[rotation])
            {
                boardArray[p.X + activePoint.X, p.Y + activePoint.Y].BackColor = Color.Gray;
            }
            rotation += 1;
            if (rotation == rotations.Count)
            {
                rotation = 0;
            }
            foreach (Point p in rotations[rotation])
            {
                boardArray[p.X + activePoint.X, p.Y + activePoint.Y].BackColor = pieceColor;
            }
        }
    }

    // Locks the active piece into the fixedBlocks list, clears any full
    // rows, then spawns the next piece.
    public void fixPiece()
    {
        foreach (Point p in rotations[rotation])
        {
            fixedBlocks.Add(boardArray[activePoint.X + p.X, activePoint.Y + p.Y]);
        }
        clearRows();
        placePieceInList();
        createInitialPiece();
    }

    // Scans bottom-up for full rows, deletes them, and awards score
    // (100/300/600/1000 for 1/2/3/4 rows cleared at once).
    public void clearRows()
    {
        bool fullRow;
        int numberOfDeletedRows = 0;
        for (int i = 21; i > 0; i--)
        {
            fullRow = true;
            for (int j = 1; j < 11; j++)
            {
                if (boardArray[j, i].BackColor == Color.Gray)
                {
                    fullRow = false;
                    break;
                }
            }
            if (fullRow)
            {
                deleteRow(i);
                // Re-check the same row index: rows above shifted down.
                i += 1;
                numberOfDeletedRows++;
            }
        }
        if (numberOfDeletedRows == 1)
            totalScore += 100;
        else if (numberOfDeletedRows == 2)
            totalScore += 300;
        else if (numberOfDeletedRows == 3)
            totalScore += 600;
        else if (numberOfDeletedRows == 4)
            totalScore += 1000;
    }

    // Deletes a full row by copying every row above it down one cell.
    // NOTE(review): numberOfDeletedRow is never used, and fixedBlocks is
    // only pruned for cells that become gray (never extended for cells
    // that become occupied) — verify the fixed-block bookkeeping.
    public void deleteRow(int row)
    {
        int numberOfDeletedRow = 0;
        for (int i = row - 1; i > 0; i--)
        {
            for (int j = 1; j < 11; j++)
            {
                boardArray[j, (i + 1)].BackColor = boardArray[j, i].BackColor;
                if (boardArray[j, (i + 1)].BackColor == Color.Gray)
                {
                    fixedBlocks.Remove(boardArray[j, i + 1]);
                }
            }
        }
    }

    // Redraws a piece moving down: erase cells at y-1, paint cells at y.
    public void repaintDroppingPiece(int x, int y, int rotation)
    {
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + x, p.Y + (y - 1)].BackColor = Color.Gray;
        }
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + x, p.Y + y].BackColor = pieceColor;
        }
    }

    // Redraws a piece moving to the left: erase cells at x+1, paint at x.
    public void repaintLeftMovement(int x, int y, int rotation)
    {
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + (x + 1), p.Y + y].BackColor = Color.Gray;
        }
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + x, p.Y + y].BackColor = pieceColor;
        }
    }

    // Redraws a piece moving to the right: erase cells at x-1, paint at x.
    public void repaintRightMovement(int x, int y, int rotation)
    {
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + (x - 1), p.Y + y].BackColor = Color.Gray;
        }
        foreach (Point p in rotations[rotation])
        {
            boardArray[p.X + x, p.Y + y].BackColor = pieceColor;
        }
    }

    // Game over. Is true if a fixed piece occupies the top playable row.
    public bool gameOver()
    {
        for (int i = 1; i < 11; i++)
        {
            if (fixedBlocks.Contains(boardArray[i, 1]))
            {
                return true;
            }
        }
        return false;
    }

    // Returns the current score formatted for display.
    public string getTotalScore()
    {
        return totalScore.ToString();
    }
}
}
<file_sep>/Tetris/Form1.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Tetris
{
public partial class TetrisForm : Form
{
    public TetrisForm()
    {
        InitializeComponent();
    }

    // Game state and board rendering logic.
    Board b = new Board();

    // Builds the board, spawns the first piece, and starts the tick thread.
    private void Form1_Load(object sender, EventArgs e)
    {
        exitButton.Visible = false;
        gameOverLabel.Visible = false;
        b.createBoard(this);
        b.createNextPieceBoard(this);
        b.setBorder();
        b.createInitialPiece();
        Thread t = new Thread(ticker);
        // Run the ticker as a background thread: a foreground thread would
        // keep the process alive after the form is closed, and its Invoke()
        // calls against the disposed form would throw.
        t.IsBackground = true;
        t.Start();
    }

    // Keyboard handling: arrow keys move/rotate the active piece.
    protected override bool ProcessCmdKey(ref Message msg, Keys keyData)
    {
        if (keyData == Keys.Left)
        {
            b.moveLeftOrRight(-1);
        }
        if (keyData == Keys.Right)
        {
            b.moveLeftOrRight(1);
        }
        if (keyData == Keys.Down)
        {
            b.dropDownOneSpace();
        }
        if (keyData == Keys.Up)
        {
            b.rotatePiece();
        }
        return base.ProcessCmdKey(ref msg, keyData);
    }

    // Updates the score label on the UI thread.
    void setScoreLabel()
    {
        scoreLabel.Invoke((MethodInvoker)(() => scoreLabel.Text = b.getTotalScore()));
    }

    // Shows the game-over label and exit button on the UI thread.
    void setGameOverScreen()
    {
        gameOverLabel.Invoke((MethodInvoker)(() => gameOverLabel.Visible = true));
        exitButton.Invoke((MethodInvoker)(() => exitButton.Visible = true));
    }

    // Game loop: once per second, refresh the score and drop the active
    // piece one row, until the board reports game over.
    void ticker()
    {
        while (!b.gameOver())
        {
            setScoreLabel();
            Thread.Sleep(1000);
            b.dropDownOneSpace();
        }
        setGameOverScreen();
    }

    private void exitButton_Click(object sender, EventArgs e)
    {
        Application.Exit();
    }
}
}
<file_sep>/README.md
# Tetris
A tetris game made in C#
This project was created for a final project. Please don't copy it. That's a guaranteed way to get kicked out of school.
<file_sep>/Tetris/Tetromino.cs
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Tetris
{
class Tetromino
{
    // All rotation states for this piece; each inner list holds the four
    // block offsets (relative to the piece's board anchor) for one state.
    List<List<Point>> Rotations;
    Color pieceColor;

    /// Builds the rotation tables and color for piece index i (0-6):
    /// 0=I, 1=square, 2=L, 3=J, 4=T, 5=S, 6=Z.
    public Tetromino(int i)
    {
        Rotations = new List<List<Point>>();
        switch (i)
        {
            // I-piece
            case 0:
                pieceColor = Color.Blue;
                addRotation(new Point(0, 0), new Point(1, 0), new Point(2, 0), new Point(3, 0));
                addRotation(new Point(1, 3), new Point(1, 2), new Point(1, 1), new Point(1, 0));
                break;
            // Square piece (rotation-invariant, single state)
            case 1:
                pieceColor = Color.Cyan;
                addRotation(new Point(0, 1), new Point(1, 1), new Point(0, 0), new Point(1, 0));
                break;
            // L-piece
            case 2:
                pieceColor = Color.Orange;
                addRotation(new Point(0, 1), new Point(0, 0), new Point(1, 0), new Point(2, 0));
                addRotation(new Point(1, 2), new Point(0, 2), new Point(0, 1), new Point(0, 0));
                addRotation(new Point(0, 1), new Point(1, 1), new Point(2, 1), new Point(2, 0));
                addRotation(new Point(1, 2), new Point(1, 1), new Point(0, 0), new Point(1, 0));
                break;
            // J-piece
            case 3:
                pieceColor = Color.Yellow;
                addRotation(new Point(2, 1), new Point(0, 0), new Point(1, 0), new Point(2, 0));
                addRotation(new Point(0, 2), new Point(0, 1), new Point(0, 0), new Point(1, 0));
                addRotation(new Point(2, 1), new Point(1, 1), new Point(0, 1), new Point(0, 0));
                addRotation(new Point(0, 2), new Point(1, 2), new Point(1, 1), new Point(1, 0));
                break;
            // T-piece
            case 4:
                pieceColor = Color.Green;
                addRotation(new Point(1, 1), new Point(0, 0), new Point(1, 0), new Point(2, 0));
                addRotation(new Point(0, 2), new Point(0, 1), new Point(1, 1), new Point(0, 0));
                addRotation(new Point(0, 1), new Point(1, 1), new Point(2, 1), new Point(1, 0));
                addRotation(new Point(1, 2), new Point(1, 1), new Point(0, 1), new Point(1, 0));
                break;
            // S-piece
            case 5:
                pieceColor = Color.Pink;
                addRotation(new Point(0, 1), new Point(1, 1), new Point(1, 0), new Point(2, 0));
                addRotation(new Point(1, 2), new Point(0, 1), new Point(1, 1), new Point(0, 0));
                break;
            // Z-piece
            case 6:
                pieceColor = Color.Red;
                addRotation(new Point(1, 1), new Point(2, 1), new Point(0, 0), new Point(1, 0));
                addRotation(new Point(0, 2), new Point(1, 1), new Point(0, 1), new Point(1, 0));
                break;
        }
    }

    // Appends one rotation state built from the given block offsets,
    // preserving the order in which the offsets are listed.
    private void addRotation(params Point[] blocks)
    {
        Rotations.Add(new List<Point>(blocks));
    }

    public List<List<Point>> getRotations()
    {
        return this.Rotations;
    }

    public Color getColor()
    {
        return pieceColor;
    }
}
}
|
584696640fd64e46f887411a497cf29c22459d65
|
[
"Markdown",
"C#"
] | 4 |
C#
|
Bearded-Baguette/Tetris
|
00bafa0a6a829597a4892d33afb008d785ba8361
|
39502e1e5489c1b5534a53c58e485a60d303d239
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.